hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
aeb4e027894850c0655d23c7ce721ab31bed5d19
| 73
|
py
|
Python
|
clib/utils/boolian.py
|
Swall0w/clib
|
46f659783d5a0a6ec5994c3c707c1cc8a7934385
|
[
"MIT"
] | 1
|
2017-08-27T00:01:27.000Z
|
2017-08-27T00:01:27.000Z
|
clib/utils/boolian.py
|
Swall0w/clib
|
46f659783d5a0a6ec5994c3c707c1cc8a7934385
|
[
"MIT"
] | 49
|
2017-08-20T02:09:26.000Z
|
2017-12-31T11:58:27.000Z
|
clib/utils/boolian.py
|
Swall0w/clib
|
46f659783d5a0a6ec5994c3c707c1cc8a7934385
|
[
"MIT"
] | 1
|
2017-12-08T08:31:38.000Z
|
2017-12-08T08:31:38.000Z
|
import random
def randombool():
return bool(random.getrandbits(1))
| 12.166667
| 38
| 0.726027
| 9
| 73
| 5.888889
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016393
| 0.164384
| 73
| 5
| 39
| 14.6
| 0.852459
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
4e27fbd3b1752dd502e2cfd605221c65efcf931b
| 25,799
|
py
|
Python
|
py4sos/transactional.py
|
manuGil/py4sos
|
ffd9d59847ba99e93dcdff883df18dac6d209bbe
|
[
"Apache-2.0"
] | null | null | null |
py4sos/transactional.py
|
manuGil/py4sos
|
ffd9d59847ba99e93dcdff883df18dac6d209bbe
|
[
"Apache-2.0"
] | null | null | null |
py4sos/transactional.py
|
manuGil/py4sos
|
ffd9d59847ba99e93dcdff883df18dac6d209bbe
|
[
"Apache-2.0"
] | null | null | null |
"""
Function for the SOS Transactional profile.
This set of function format requests to publish and handle data in a SOS using a RESTful API.
Requests need to be passed as the body of a HTTP request to the SOS server.
When more than one syntax is allowed, requests as passed using XML version 2.0
Author: Manuel G. Garcia
Created: 23-05-2017
"""
# TODO: give better names to URIs when inserting sensors
def insertSensor(offering, procedure, foi, sensor_type):
"""
Prepares the body of a InsertSensor request for JSON biding.
:param offering: an instance of class Offering.Type object.
:param Procedure: instance of class Procedure. type object.
:param foi: feature of interest. Instance of FoI
:param sensor_type: SensorType object
:return: valid body for an InsertSensor request.
"""
# for JSON double quotes: \", or \u0022
# specify procedure ID:
procedureID = 'http://www.geosmartcity.nl/test/procedure/' + str(procedure.pid) # URL format
shortName = 'short name' #string
longName = 'long name' #string
# Offering values
offName = offering.name #Offering name, double quoted
offID = offering.fullId #URL format of full id
if foi != None: # check if feature of interest should be declare
featureID = 'http://www.geosmartcity.nl/test/featureOfInterest/' + str(foi.fid) # URL format
cordX = foi.x # longitude degrees, float
cordY = foi.y # latitude degrees, float
height = foi.z # altitude in meters, float
h_unit = foi.Hunit # units for horizontal coordinates
z_unit = foi.Vunit # units for altitude
else:
pass
op_name = procedure.name
ObsProp = procedure.defn # URL,
obs_types= []
output_list = '' # output list element for describe procedure
properties_list = []
for a in sensor_type.pattern["attributes"]:
ObsPropName = '\"' + a[0] + '\"' # attribute name
# print(ObsPropName)
unit_name = sensor_type.om_types[a[1]] # om type
magnitud = a # ??
obs_name = ObsPropName.replace('\"', '')
obs_name = "".join(obs_name.split())# observable property name
output = '<sml:output name=' + ObsPropName + '><swe:Quantity definition=' + '\"' + (procedure.url + obs_name )+ '\"' + '></swe:Quantity></sml:output>'
output_list = output_list + output
properties_list.append(procedure.url + obs_name) # add property identifier to the list.
# prepare list of measurement types
# A sensor can not registry duplicated sensor types.
this_type = "http://www.opengis.net/def/observationType/OGC-OM/2.0/"+unit_name
if this_type not in obs_types: # when new type appears
obs_types.append(this_type)
else:
continue
# Unit of measurement:
unit_name = '\"' + procedure.name + '\"' # double quoted string
# unit = omType # one of the MO measurement types
body = {
"request" : "InsertSensor",
"service" : "SOS",
"version" : "2.0.0",
"procedureDescriptionFormat" : "http://www.opengis.net/sensorML/1.0.1",
"procedureDescription" : '<sml:SensorML xmlns:swes=\"http://www.opengis.net/swes/2.0\" xmlns:sos=\"http://www.opengis.net/sos/2.0\" xmlns:swe=\"http://www.opengis.net/swe/1.0.1\" xmlns:sml=\"http://www.opengis.net/sensorML/1.0.1\" xmlns:gml=\"http://www.opengis.net/gml\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" version=\"1.0.1\"><sml:member>' + '<sml:System><sml:identification><sml:IdentifierList><sml:identifier name=\"uniqueID\"><sml:Term definition=\"urn:ogc:def:identifier:OGC:1.0:uniqueID\"><sml:value>' + procedureID + '</sml:value></sml:Term></sml:identifier><sml:identifier name=\"longName\"><sml:Term definition=\"urn:ogc:def:identifier:OGC:1.0:longName\"><sml:value>' + longName + '</sml:value></sml:Term></sml:identifier><sml:identifier name=\"shortName\"><sml:Term definition=\"urn:ogc:def:identifier:OGC:1.0:shortName\"><sml:value>' + shortName + '</sml:value></sml:Term></sml:identifier></sml:IdentifierList></sml:identification><sml:capabilities name=\"offerings\"><swe:SimpleDataRecord><swe:field name=' + offName + '><swe:Text definition=\"urn:ogc:def:identifier:OGC:offeringID\"><swe:value>'+ offID + '</swe:value></swe:Text></swe:field></swe:SimpleDataRecord></sml:capabilities><sml:capabilities name=\"featuresOfInterest\"><swe:SimpleDataRecord><swe:field name=\"featureOfInterestID\"><swe:Text><swe:value>'+ featureID + '</swe:value></swe:Text></swe:field></swe:SimpleDataRecord></sml:capabilities><sml:position name=\"sensorPosition\"><swe:Position referenceFrame=\"urn:ogc:def:crs:EPSG::4326\"><swe:location><swe:Vector gml:id=\"STATION_LOCATION\"><swe:coordinate name=\"easting\"><swe:Quantity axisID=\"x\"><swe:uom code=\"degree\"/><swe:value>'+ str(cordX) + '</swe:value></swe:Quantity></swe:coordinate><swe:coordinate name=\"northing\"><swe:Quantity axisID=\"y\"><swe:uom code=' + '\"' + h_unit + '\"' + ' /><swe:value>' + str(cordY) + '</swe:value></swe:Quantity></swe:coordinate><swe:coordinate 
name=\"altitude\"><swe:Quantity axisID=\"z\"><swe:uom code=' + '\"' + z_unit + '\"' + '/><swe:value>' + str(height) + '</swe:value></swe:Quantity></swe:coordinate></swe:Vector></swe:location></swe:Position></sml:position><sml:inputs><sml:InputList><sml:input name=' + '\"' + op_name + '\"' + '><swe:ObservableProperty definition=' + '\"' + ObsProp + '\"' + '/></sml:input></sml:InputList></sml:inputs><sml:outputs><sml:OutputList>' +
output_list + '</sml:OutputList></sml:outputs></sml:System></sml:member></sml:SensorML>',
"observableProperty": properties_list, #
"observationType": obs_types,
"featureOfInterestType" : "http://www.opengis.net/def/samplingFeatureType/OGC-OM/2.0/SF_SamplingPoint"}
return body
def insertObservation(observation, foi, to_offering, with_procedure, observed_property=str, geom=True):
'''
Prepares the body of InsertObservation request using JSON binding
:param Offering: pre-existing offering in the OSO
:param to_procedure: existing procedure for the observation
:param observation: observation object
:param observed_property: property to which this observation belongs to
:return: body for a insert observatio request in JSON
'''
# Observation offering
# a URI to the offering description
offId = to_offering.id # of any type
offering = "http://www.geosmartcity.nl/test/offering/" + str(offId)
# Observation ID
obsId = observation.id # of any type
observationId = "http://www.geosmartcity.nl/test/observation/" + str(obsId)
# Selected measurement type.
omtype = observation.uom
mtype = "http://www.opengis.net/def/observationType/OGC-OM/2.0/" + omtype
# Procedure
# The procedure should be previously declared in 'InsertSensor'
# An URI to the procedure description. Any type of URL
proc = "http://www.geosmartcity.nl/test/procedure/" + str(with_procedure.pid)
# declared in procedure description
# One of any type of URI
observedProp = "http://www.geosmartcity.nl/test/observableProperty/" + "".join(observed_property.split())
# ID for Feature of Interest for current observation
# Of any type URI
featureID = "http://www.geosmartcity.nl/test/featureOfInterest/" + str(foi.fid)
featureName= "Name for " + str(foi.fid) # string identifying the name of the feature of interest
# Geometry type for the feature of interest. Most of the time 'Point'.
featureType = 'Point' # one of any of the types from the Simple Access Feature Model
# geometry property of feature of interest.
featureCord = [foi.y, foi.x] # Latitude, Longitude := (Y, X)
#Phenomenon, Declare unit of measurements and values.
# Elements should be in accordance with the MO type.
# For most real time observations PhenomenonTime and resultTime are (practically) the same.
phenomenonTime = observation.phTime # Time at which observation started
resultTime = observation.rTime # Time at which result of observation was generates
# Unit of measurement for observation value, when declaring a OM_Measurement
mag = observation.unit
# for category observation
codespace = 'codespace'
# Observed value for the declared time
# Should be of proper data type for OM type
phenomenonValue = observation.Value #example for OM_Measurement
result = ''
# More conditions are need for other OM types
if omtype == "OM_Measurement":
result = {"uom": mag,
"value": phenomenonValue}
elif omtype == "OM_CategoryObservation":
result = {"codespace": codespace,
"value": phenomenonValue}
elif omtype == "OM_GeometryObservation":
result = {"type": "Point",
"coordinates": [foi.x, foi.y]}
else: # CountObservation, TruthObservation, TextObservation
result = phenomenonValue
if geom is True:
# prepare body including feature of interest declaration
body = {
"request": "InsertObservation",
"service": "SOS",
"version": "2.0.0",
"offering": offering,
"observation": { # optionally a list of observation.
"identifier": {
"value": observationId,
"codespace": "http://www.opengis.net/def/nil/OGC/0/unknown"
},
"type": mtype,
"procedure": proc,
"observedProperty": observedProp, #One defined in procedure description
"featureOfInterest": {
"identifier": {
"value": featureID,
"codespace": "http://www.opengis.net/def/nil/OGC/0/unknown"
},
"name": [
{
"value": featureName,
"codespace": "http://www.opengis.net/def/nil/OGC/0/unknown"
}
],
"sampledFeature": [
"http://www.52north.org/test/featureOfInterest/world"
],
"geometry": {
"type": featureType,
"coordinates": featureCord ,
"crs": {
"type": "name",
"properties": {
"name": "EPSG:4326"
}
}
}
},
"phenomenonTime": phenomenonTime,
"resultTime": resultTime,
# Result elements depend of type of MO
"result":
result
}
}
else:
# prepare body WITHOUT feature of interest declaration
body = {
"request": "InsertObservation",
"service": "SOS",
"version": "2.0.0",
"offering": offering,
"observation": { # optionally a list of observation.
"identifier": {
"value": observationId,
"codespace": "http://www.opengis.net/def/nil/OGC/0/unknown"
},
"type": mtype,
"procedure": proc,
"observedProperty": observedProp, # One defined in procedure description
"featureOfInterest": featureID,
"phenomenonTime": phenomenonTime,
"resultTime": resultTime,
# Result elements depend of type of MO
"result":
result
}
}
# Return dictionary with InsertObservation elements
return body
def insertObservationSP(observation, foi, to_offering, with_procedure, observed_property=str):
'''
Prepares the body of InsertObservation request using the Spatil Profile for JSON binding.
Every observation must have a geometry
:param to_offering: pre-existing offering in the SOS
:param foi: contains location of sensor
:param with_procedure: existing procedure for the observation
:param observation: observation object
:param observed_property: property to which this observation belongs to
:return: body for a insert observation with spatial profile
'''
# Observation offering
# a URI to the offering description
offId = to_offering.id # of any type
offering = "http://www.geosmartcity.nl/test/offering/" + str(offId)
# Observation ID
obsId = observation.id # of any type
observationId = "http://www.geosmartcity.nl/test/observation/" + str(obsId)
# Selected measurement type.
omtype = observation.uom
mtype = "http://www.opengis.net/def/observationType/OGC-OM/2.0/" + omtype
# Procedure
# The procedure should be previously declared in 'InsertSensor'
# An URI to the procedure description. Any type of URL
proc = "http://www.geosmartcity.nl/test/procedure/" + str(with_procedure.pid)
# declared in procedure description
# One of any type of URI
observedProp = "http://www.geosmartcity.nl/test/observableProperty/" + "".join(observed_property.split())
# Geometry type for the feature of interest. Most of the time 'Point'.
featureType = 'Point' # one of any of the types from the Simple Access Feature Model
# ID for Feature of Interest for current observation
# Of any type URI
# Removed in the Spatial profile to allow auto-generations
featureID = "http://www.geosmartcity.nl/test/featureOfInterest/" + str(foi.fid)
featureName= "Name for " + str(foi.fid) # string identifying the name of the feature of interest
# Sampling Geometry
featureCord = [foi.y, foi.x] # Latitude, Longitude := (Y, X)
#Phenomenon, Declare unit of measurements and values.
# Elements should be in accordance with the MO type.
# For most real time observations PhenomenonTime and resultTime are (practically) the same.
phenomenonTime = observation.phTime # Time at which observation started
resultTime = observation.rTime # Time at which result of observation was generates
# Unit of measurement for observation value, when declaring a OM_Measurement
mag = observation.unit
# for category observation
codespace = 'codespace'
# Observed value for the declared time
# Should be of proper data type for OM type
phenomenonValue = observation.Value #example for OM_Measurement
result = ''
# More conditions are need for other OM types
if omtype == "OM_Measurement":
result = {"uom": mag,
"value": phenomenonValue}
elif omtype == "OM_CategoryObservation":
result = {"codespace": codespace,
"value": phenomenonValue}
elif omtype == "OM_GeometryObservation":
result = {"type": "Point",
"coordinates": [foi.x, foi.y]}
else: # CountObservation, TruthObservation, TextObservation
result = phenomenonValue
# prepare body with mandatory geometry 'parameter'
body = {
"request": "InsertObservation",
"service": "SOS",
"version": "2.0.0",
"offering": offering,
"observation": { # optionally a list of observation.
"identifier": {
"value": observationId,
"codespace": "http://www.opengis.net/def/nil/OGC/0/unknown"
},
"type": mtype,
"procedure": proc,
# Extra parameter reporting current location
"parameter": {
"NamedValue": {
"name": "http://www.opengis.net/def/param-name/OGC-OM/2.0/samplingGeometry",
"value": {
"type": featureType,
"coordinates": featureCord
}
}
},
"observedProperty": observedProp, # One defined in procedure description
"featureOfInterest": {
"identifier": {
"value": featureID,
"codespace": "http://www.opengis.net/def/nil/OGC/0/unknown"
},
"name": [
{
"value": featureName,
"codespace": "http://www.opengis.net/def/nil/OGC/0/unknown"
}
],
"sampledFeature": [
"http://www.52north.org/test/featureOfInterest/world"
],
"geometry": {
"type": featureType,
"coordinates": featureCord,
"crs": {
"type": "name",
"properties": {
"name": "EPSG:4326"
}
}
}
},
"phenomenonTime": phenomenonTime,
"resultTime": resultTime,
# Result elements depend of type of MO
"result":
result
}
}
# Return dictionary with InsertObservation elements
return body
def insertMobileSensor(offering, procedure, foi, sensor_type):
"""
Prepares the body of a InsertSensor request to register a mobile sensor. Based on SensorML 2.0.
Allowed sensor types: insitu-fixed, insitu-mobile.
:param offering: an instance of class Offering.Type object.
:param procedure: instance of class Procedure. type object.
:param foi: feature of interest. Instance of FoI
:param sensor_type: SensorType object
:return: valid body for an InsertSensor request.
"""
# TODO: extend to chose among all sensor types, or make type a parameter
# for JSON double quotes: \", or \u0022
# specify procedure ID:
procedureID = 'http://www.geosmartcity.nl/test/procedure/' + str(procedure.pid) # URL format
shortName = 'short name' #string
longName = 'long name' #string
# Offering values
offName = offering.name #Offering name, double quoted
offID = offering.fullId #URL format of full id
if foi != None: # check if feature of interest should be declare
featureID = 'http://www.geosmartcity.nl/test/featureOfInterest/' + str(foi.fid) # URL format
cordX = foi.x # longitude degrees, float
cordY = foi.y # latitude degrees, float
height = foi.z # altitude in meters, float
h_unit = foi.Hunit # units for horizontal coordinates
z_unit = foi.Vunit # units for altitude
else:
pass
op_name = procedure.name
ObsProp = procedure.defn # URL,
# print(ObsProp)
obs_types= []
output_list = '' # output list element for describe procedure
properties_list = []
for a in sensor_type.pattern["attributes"]:
ObsPropName = '\"' + a[0] + '\"' # attribute name
# print(ObsPropName)
unit_name = sensor_type.om_types[a[1]] # om type
obs_name = ObsPropName.replace('\"', '')
obs_name = "".join(obs_name.split())# observable property name
# TODO: check complience of <Output> types in procedure definition and OM_types in sensorML
output = '<sml:output name=' + "".join(ObsPropName.split()) + '><swe:Quantity definition=' + '\"' + (procedure.url + obs_name ) + '\"' + '> <swe:uom code=\"'+ unit_name + '\"/></swe:Quantity></sml:output>'
output_list = output_list + output
properties_list.append(procedure.url + obs_name) # add property identifier to the list.
# prepare list of measurement types
# A sensor can not registry duplicated sensor types.
this_type = "http://www.opengis.net/def/observationType/OGC-OM/2.0/"+ unit_name
if this_type not in obs_types: # when new type appears
obs_types.append(this_type)
else:
continue
# Unit of measurement:
unit_name = '\"' + procedure.name + '\"' # double quoted string
# unit = omType # one of the MO measurement types
# Output name=, does not accept white spaces.
# Identifier for first procedure works as parent, it cannot be the same as the new procedure.
# AttachedTo parameter is not compulsory. # TODO: procedure description should modified to match a desired data model.
body = {
"request" : "InsertSensor",
"service" : "SOS",
"version" : "2.0.0",
"procedureDescriptionFormat" : "http://www.opengis.net/sensorml/2.0",
"procedureDescription": '<sml:PhysicalSystem gml:id=\"sensor9\" xmlns:swes=\"http://www.opengis.net/swes/2.0\" xmlns:sos=\"http://www.opengis.net/sos/2.0\" xmlns:swe=\"http://www.opengis.net/swe/2.0\" ' + 'xmlns:sml=\"http://www.opengis.net/sensorml/2.0\" xmlns:gml=\"http://www.opengis.net/gml/3.2\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xmlns:gco=\"http://www.isotc211.org/2005/gco\" xmlns:gmd=\"http://www.isotc211.org/2005/gmd\"><gml:identifier codeSpace=\"uniqueID\">' + procedureID + '</gml:identifier><sml:identification><sml:IdentifierList><sml:identifier><sml:Term definition=\"urn:ogc:def:identifier:OGC:1.0:longName\"><sml:label>longName</sml:label><sml:value>' + longName + '</sml:value></sml:Term></sml:identifier><sml:identifier><sml:Term definition=\"urn:ogc:def:identifier:OGC:1.0:shortName\"><sml:label>shortName</sml:label><sml:value>' + shortName + '</sml:value></sml:Term></sml:identifier></sml:IdentifierList></sml:identification><sml:capabilities name=\"offerings\"><sml:CapabilityList><sml:capability name=\"offeringID\"><swe:Text definition=\"urn:ogc:def:identifier:OGC:offeringID\"><swe:label>offeringID</swe:label><swe:value>' + offID + '</swe:value></swe:Text></sml:capability></sml:CapabilityList></sml:capabilities>' + '<sml:capabilities name=\"metadata\"><sml:CapabilityList><sml:capability name=\"insitu\"><swe:Boolean definition=\"insitu\"><swe:value>true</swe:value></swe:Boolean> </sml:capability><sml:capability name=\"mobile\"><swe:Boolean definition=\"mobile\"><swe:value>true</swe:value></swe:Boolean></sml:capability></sml:CapabilityList></sml:capabilities>' + '<sml:featuresOfInterest><sml:FeatureList definition=\"http://www.opengis.net/def/featureOfInterest/identifier\"><swe:label>featuresOfInterest</swe:label><sml:feature xlink:href=\"' + featureID + '\"/></sml:FeatureList></sml:featuresOfInterest><sml:inputs><sml:InputList><sml:input name=' + '\"' + op_name + '\"' + 
'><sml:ObservableProperty definition=' + '\"' + ObsProp + '\"' + ' /></sml:input></sml:InputList></sml:inputs><sml:outputs><sml:OutputList>' + output_list + '</sml:OutputList></sml:outputs><sml:position><swe:Vector referenceFrame=\"urn:ogc:def:crs:EPSG::4326\"><swe:coordinate name=\"easting\"><swe:Quantity axisID=\"x\"><swe:uom code=' + '\"'+ h_unit + '\"' + '/><swe:value>' +
str(cordX) + '</swe:value></swe:Quantity></swe:coordinate><swe:coordinate name=\"northing\"><swe:Quantity axisID=\"y\"><swe:uom code=\"' + h_unit + '\" /><swe:value>' +
str(cordY) + '</swe:value></swe:Quantity></swe:coordinate><swe:coordinate name=\"altitude\"><swe:Quantity axisID=\"z\"><swe:uom code=\"' + z_unit + '\" /><swe:value>' + str(
height) + '</swe:value></swe:Quantity></swe:coordinate></swe:Vector></sml:position></sml:PhysicalSystem>',
"observableProperty": properties_list, #
"observationType": obs_types,
"featureOfInterestType": "http://www.opengis.net/def/samplingFeatureType/OGC-OM/2.0/SF_SamplingPoint"}
return body
def insertComplexObservation():
"""
:return:
"""
# To use conplex observation:
# 1. Register sensor with with obsevation type 'OM_ComplexObservation'
# 2. Insert observation using the Complex observation request body.
return
def deleteSensor(procedure_id=str): # TODO: Need to be completed
'''
Prepares the body of a delete sensor request for the JSON binding.
:param procedure_id_id: Identifier of the procedure URI.
'''
body = {
"request": "DeleteSensor",
"service": "SOS",
"version": "2.0.0",
"procedure": procedure_id
}
return body
def deleteObservation(observation_uri): # TODO: To be completed
'''
:param observation_uri: URL identifying an observation.
'''
def main():
import wrapper
import data_loader
dir = 'c:/sos_santander/raw_data/sample/'
f_name = "santander_example_data.json"
f_name2 = "data_stream-2016-07-01T080007.json"
h_dir = 'c:/Temp/hist_temp/'
url = 'http://130.89.217.201:8080/sos-4.4/service'
token = 'TWFudWVsIEdhcmNpYQ=='
f = wrapper.FoI('m', 'm', (1,2,3), 'my/feature/')
tsensor = wrapper.SensorType('light')
off = wrapper.Offering('my/domain/', '1', 'offering1')
proc = wrapper.Procedure('1', 'procedure1', 'my/procedure', 'lux')
r = insertMobileSensor(off, proc, f, tsensor)
print(r)
# ms =wrapper.sosPost(r, url, token)
if __name__ == '__main__':
main()
| 49.328872
| 2,433
| 0.609791
| 2,869
| 25,799
| 5.440223
| 0.146741
| 0.021976
| 0.024218
| 0.029408
| 0.79818
| 0.781266
| 0.770695
| 0.750769
| 0.738852
| 0.723539
| 0
| 0.01031
| 0.25939
| 25,799
| 523
| 2,434
| 49.328872
| 0.806563
| 0.279972
| 0
| 0.761589
| 0
| 0.099338
| 0.458152
| 0.217237
| 0.013245
| 0
| 0
| 0.007648
| 0
| 1
| 0.02649
| false
| 0.006623
| 0.006623
| 0
| 0.05298
| 0.003311
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9d685e065711912d2e67c37062c3662880251d5d
| 3,333
|
py
|
Python
|
030_ClassificationWebsite/apps/coherence/titles.py
|
cogeorg/RegulatoryComplexity_Public
|
c9578ce012ba1e84dbebb029e30d98eff3430fd6
|
[
"Apache-2.0"
] | null | null | null |
030_ClassificationWebsite/apps/coherence/titles.py
|
cogeorg/RegulatoryComplexity_Public
|
c9578ce012ba1e84dbebb029e30d98eff3430fd6
|
[
"Apache-2.0"
] | null | null | null |
030_ClassificationWebsite/apps/coherence/titles.py
|
cogeorg/RegulatoryComplexity_Public
|
c9578ce012ba1e84dbebb029e30d98eff3430fd6
|
[
"Apache-2.0"
] | null | null | null |
from flask import request, jsonify, redirect, url_for,flash, render_template, request, session, abort
from views import app
@app.route('/title_0_Y')
def title_0_Y():
return render_template('PreClass/title_0.html')
@app.route('/title_1_Y')
def title_1_Y():
return render_template('PreClass/title_1.html')
@app.route('/title_2_Y')
def title_2_Y():
return render_template('PreClass/title_2.html')
@app.route('/title_3_Y')
def title_3_Y():
return render_template('PreClass/title_3.html')
@app.route('/title_4_Y')
def title_4_Y():
return render_template('PreClass/title_4.html')
@app.route('/title_5_Y')
def title_5_Y():
return render_template('PreClass/title_5.html')
@app.route('/title_6_Y')
def title_6_Y():
return render_template('PreClass/title_6.html')
@app.route('/title_7_Y')
def title_7_Y():
return render_template('PreClass/title_7.html')
@app.route('/title_8_Y')
def title_8_Y():
return render_template('PreClass/title_8.html')
@app.route('/title_9_Y')
def title_9_Y():
return render_template('PreClass/title_9.html')
@app.route('/title_10_Y')
def title_10_Y():
return render_template('PreClass/title_10.html')
@app.route('/title_11_Y')
def title_11_Y():
return render_template('PreClass/title_11.html')
@app.route('/title_12_Y')
def title_12_Y():
return render_template('PreClass/title_12.html')
@app.route('/title_13_Y')
def title_13_Y():
return render_template('PreClass/title_13.html')
@app.route('/title_14_Y')
def title_14_Y():
return render_template('PreClass/title_14.html')
@app.route('/title_15_Y')
def title_15_Y():
return render_template('PreClass/title_15.html')
@app.route('/title_16_Y')
def title_16_Y():
return render_template('PreClass/title_16.html')
def _make_original_title_view(template, endpoint):
    """Build a view function that renders *template*; name it *endpoint*."""
    def _view():
        return render_template(template)
    _view.__name__ = endpoint
    return _view


# The original file repeated an identical route/def/return stanza for every
# Original title page (title_0 .. title_16); register them in a loop instead.
# URL paths and Flask endpoint names are unchanged.
for _n in range(0, 17):
    _ep = 'title_%d' % _n
    _fn = _make_original_title_view('Original/title_%d.html' % _n, _ep)
    app.add_url_rule('/' + _ep, endpoint=_ep, view_func=_fn)
    # Preserve the old module-level names (title_0, ...) in case anything
    # references them directly.
    globals()[_ep] = _fn
| 23.307692
| 102
| 0.726373
| 528
| 3,333
| 4.25947
| 0.075758
| 0.217875
| 0.196532
| 0.249444
| 0.78835
| 0.574478
| 0.353046
| 0
| 0
| 0
| 0
| 0.048436
| 0.108011
| 3,333
| 142
| 103
| 23.471831
| 0.708039
| 0
| 0
| 0
| 0
| 0
| 0.314431
| 0.218422
| 0
| 0
| 0
| 0
| 0
| 1
| 0.326923
| true
| 0
| 0.019231
| 0.326923
| 0.673077
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
d1979045dfbbbcac8d34e125d8ef7f411c44573e
| 7,164
|
py
|
Python
|
hoft/core/decorators.py
|
sys-git/hoft
|
a59bd3f38a258eb6d7f56a9a79034159b18fd6a4
|
[
"MIT"
] | null | null | null |
hoft/core/decorators.py
|
sys-git/hoft
|
a59bd3f38a258eb6d7f56a9a79034159b18fd6a4
|
[
"MIT"
] | 323
|
2017-09-13T07:20:51.000Z
|
2022-03-31T12:30:24.000Z
|
hoft/core/decorators.py
|
sys-git/hoft
|
a59bd3f38a258eb6d7f56a9a79034159b18fd6a4
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: latin-1 -*-
#
# Brief description
# @module hoft.core.decorators
# @version 0.1
# @copyright (c) 2017-present Francis Horsman.
from inspect import getargspec, getcallargs
import six
from hoft.core.parsers_in import parse_all_in_args
from hoft.core.parsers_sig import parse_all_sig_args
from hoft.core.utils import raise_exc
def analyse_in(*parse_args, **parse_kwargs):
    """
    Decorator for methods (to analyse) the args and kwargs of the decorated callable.
    This method does not modify the args or kwargs in any way.
    Deprecated. Will be removed in a future version, use `analyse_sig` instead.

    :param parse_args:
        A list of callables which accept two values only:
        These callables will be passed the target function's argument at the same position as
        the callable is in the decorator's arguments list and the index of the argument.
        If callable==`IGNORE`, then the decorated function's arg is not parsed.
    :param parse_kwargs:
        A dictionary of name, callables. The name represents the target function's kwarg that
        will be passed to the callable. The callable receives the name, value and a boolean
        representing if the name is present in the kwargs:
        ie: `def my_func(name, value, name_in_decorated_funcs_passed_kwargs)`.
    :param bool parse_kwargs['_fail_fast_']:
        True: Fail on the first exception raised by any supplied callable.
    :param parse_kwargs['_on_error_']:
        Callable or type to be called when an exception is found in a supplied callable; if
        the type is an exception or subclass-of, it will be raised (the exception constructor
        should take the same signature as my_func below):
        ie: `def my_func(exc, list_of_excs)`.
        If the type is not an exception or subclass-of it will be called, it is up to this
        callable to raise an exception if required.
    :returns:
        Decorated function.
    :note:
        Any exception raised by a supplied callable will have an additional field: `_errors_`.
        This is always a list of one or all of the errors encountered during the supplied
        callables (depending on the value of the `_fail_fast_` kwargs).
    """
    # BUG FIX: pop the control kwargs ONCE, at decoration time.  The original
    # popped them inside `wrapper`, which mutated the closed-over parse_kwargs
    # on the first call, silently dropping `_fail_fast_`/`_on_error_` for every
    # subsequent call of the decorated function.
    fail_fast = parse_kwargs.pop('_fail_fast_', False)
    on_error = parse_kwargs.pop('_on_error_', None)

    def decorator(func):
        # The decorated function's signature never changes, so inspect it once
        # here instead of on every call.
        argspec = getargspec(func)

        @six.wraps(func)
        def wrapper(*args, **kwargs):
            errors = parse_all_in_args(
                parse_args,
                parse_kwargs,
                args,
                kwargs,
                argspec,
                on_error,
                fail_fast,
            )
            if errors and not fail_fast:
                # We have errors to raise which have not already been raised.
                exc = errors[0]
                raise_exc(
                    exc=exc.error,
                    on_error=on_error,
                    errors=errors,
                    fail_fast=fail_fast,
                    force=True,
                )
            # Call the wrapped function with its arguments untouched:
            return func(*args, **kwargs)
        return wrapper
    return decorator
def analyse_sig(*parse_args, **parse_kwargs):
    """
    Decorator for methods (to analyse) the args and kwargs of the decorated callable.
    This method does not modify the args or kwargs in any way.
    Preferred method over `analyse_in`.

    :param parse_args:
        A list of callables which accept two values only:
        These callables will be passed the target function's argument at the same position as
        the callable is in the decorator's arguments list and the index of the argument.
        If callable==`IGNORE`, then the decorated function's arg is not parsed.
    :param parse_kwargs:
        A dictionary of name, callables. The name represents the target function's kwarg that
        will be passed to the callable. The callable receives the name, value and a boolean
        representing if the name is present in the kwargs:
        ie: `def my_func(name, value, name_in_decorated_funcs_passed_kwargs)`.
    :param bool parse_kwargs['_fail_fast_']:
        True: Fail on the first exception raised by any supplied callable.
    :param parse_kwargs['_on_error_']:
        Callable or type to be called when an exception is found in a supplied callable; if
        the type is an exception or subclass-of, it will be raised (the exception constructor
        should take the same signature as my_func below):
        ie: `def my_func(exc, list_of_excs)`.
        If the type is not an exception or subclass-of it will be called, it is up to this
        callable to raise an exception if required.
    :param bool parse_kwargs['_strict_']:
        True=Error if all params are not analysed.
    :param callable parse_kwargs['_default_']:
        Default handler for all not previously analysed arguments.
    :returns:
        Decorated function.
    :note:
        Any exception raised by a supplied callable will have an additional field: `_errors_`.
        This is always a list of one or all of the errors encountered during the supplied
        callables (depending on the value of the `_fail_fast_` kwargs).
    """
    # BUG FIX: pop the control kwargs ONCE, at decoration time.  The original
    # popped them inside `wrapper`, which mutated the closed-over parse_kwargs
    # on the first call, silently dropping `_strict_`/`_default_`/
    # `_fail_fast_`/`_on_error_` for every subsequent call.
    strict = parse_kwargs.pop('_strict_', None)
    default = parse_kwargs.pop('_default_', None)
    fail_fast = parse_kwargs.pop('_fail_fast_', False)
    on_error = parse_kwargs.pop('_on_error_', None)

    def decorator(func):
        # The decorated function's signature never changes, so inspect it once
        # here instead of on every call.
        argspec = getargspec(func)

        @six.wraps(func)
        def wrapper(*args, **kwargs):
            # Per-call mapping of argument names to this call's values.
            callargs = getcallargs(func, *args, **kwargs)
            errors = parse_all_sig_args(
                parse_args,
                parse_kwargs,
                args,
                kwargs,
                argspec,
                callargs,
                strict,
                default,
                on_error,
                fail_fast,
            )
            if errors and not fail_fast:
                # We have errors to raise which have not already been raised.
                exc = errors[0]
                raise_exc(
                    exc=exc.error,
                    on_error=on_error,
                    errors=errors,
                    fail_fast=fail_fast,
                    force=True,
                )
            # Call the wrapped function with its arguments untouched:
            return func(*args, **kwargs)
        return wrapper
    return decorator
| 36.365482
| 99
| 0.624511
| 952
| 7,164
| 4.526261
| 0.183824
| 0.04595
| 0.019494
| 0.023207
| 0.826642
| 0.826642
| 0.826642
| 0.826642
| 0.807612
| 0.807612
| 0
| 0.003019
| 0.306533
| 7,164
| 196
| 100
| 36.55102
| 0.864332
| 0.622278
| 0
| 0.716418
| 0
| 0
| 0.024832
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.089552
| false
| 0
| 0.074627
| 0
| 0.253731
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d1be549f263eaa4aaec76257a75137f8dbc15af2
| 7,015
|
py
|
Python
|
lessons/Chapter1/gallery_section_08.py
|
ratnania/IGA-Python
|
a9d7aa9bd14d4b3f1b12cdfbc2f9bf3c0a68fff4
|
[
"MIT"
] | 6
|
2018-04-27T15:40:17.000Z
|
2020-08-13T08:45:35.000Z
|
lessons/Chapter1/gallery_section_08.py
|
GabrielJie/IGA-Python
|
a9d7aa9bd14d4b3f1b12cdfbc2f9bf3c0a68fff4
|
[
"MIT"
] | 4
|
2021-06-08T22:59:19.000Z
|
2022-01-17T20:36:56.000Z
|
lessons/Chapter1/gallery_section_08.py
|
GabrielJie/IGA-Python
|
a9d7aa9bd14d4b3f1b12cdfbc2f9bf3c0a68fff4
|
[
"MIT"
] | 4
|
2018-10-06T01:30:20.000Z
|
2021-12-31T02:42:05.000Z
|
# Public API of this module.
__all__ = [
    'assemble_matrix_ex01',
    'assemble_vector_ex01',  # BUG FIX: commas were missing here and below, so
    'assemble_norm_ex01',    # the two strings silently concatenated into one.
]
#==============================================================================
def assemble_matrix_ex01(ne1: 'int', ne2: 'int',
                         p1: 'int', p2: 'int',
                         spans_1: 'int[:]', spans_2: 'int[:]',
                         basis_1: 'double[:,:,:,:]', basis_2: 'double[:,:,:,:]',
                         weights_1: 'double[:,:]', weights_2: 'double[:,:]',
                         points_1: 'double[:,:]', points_2: 'double[:,:]',
                         vector_u: 'double[:,:]',
                         matrix: 'double[:,:,:,:]'):
    """
    Assemble (in-place, into ``matrix``) the element contributions of the
    bilinear form with integrand (1 + u^2) * (grad bi . grad bj) on a 2D
    tensor-product space, where u is the field given by ``vector_u``.

    ne1/ne2 are element counts, p1/p2 the degrees, spans_* the active span
    per element, basis_* the basis values indexed [element, local function,
    derivative order (0 or 1), quadrature point] — presumed from the 0/1
    index usage; verify against the caller.  weights_*/points_* are the
    quadrature weights/points per element.  ``matrix`` is stored in banded
    form: matrix[p1+i1, p2+i2, p1+j1-i1, p2+j2-i2].
    """
    from numpy import zeros

    # ... sizes: number of quadrature points per element in each direction
    k1 = weights_1.shape[1]
    k2 = weights_2.shape[1]

    lcoeffs_u = zeros((p1 + 1, p2 + 1))
    lvalues_u = zeros((k1, k2))

    # ... build matrices
    for ie1 in range(0, ne1):
        i_span_1 = spans_1[ie1]
        for ie2 in range(0, ne2):
            i_span_2 = spans_2[ie2]

            # Evaluate u at this element's quadrature points from its local
            # coefficients.
            lvalues_u[:, :] = 0.0
            lcoeffs_u[:, :] = vector_u[i_span_1: i_span_1 + p1 + 1,
                                       i_span_2: i_span_2 + p2 + 1]
            for il_1 in range(0, p1 + 1):
                for il_2 in range(0, p2 + 1):
                    coeff_u = lcoeffs_u[il_1, il_2]
                    for g1 in range(0, k1):
                        b1 = basis_1[ie1, il_1, 0, g1]
                        for g2 in range(0, k2):
                            b2 = basis_2[ie2, il_2, 0, g2]
                            lvalues_u[g1, g2] += coeff_u * b1 * b2

            # Local stiffness contributions for every (test, trial) pair.
            for il_1 in range(0, p1 + 1):
                for il_2 in range(0, p2 + 1):
                    for jl_1 in range(0, p1 + 1):
                        for jl_2 in range(0, p2 + 1):
                            i1 = i_span_1 - p1 + il_1
                            j1 = i_span_1 - p1 + jl_1
                            i2 = i_span_2 - p2 + il_2
                            j2 = i_span_2 - p2 + jl_2

                            v = 0.0
                            for g1 in range(0, k1):
                                for g2 in range(0, k2):
                                    # NOTE: the original also computed the
                                    # 0th-order products bi_0/bj_0 here but
                                    # never used them; removed as dead work.
                                    bi_x = basis_1[ie1, il_1, 1, g1] * basis_2[ie2, il_2, 0, g2]
                                    bi_y = basis_1[ie1, il_1, 0, g1] * basis_2[ie2, il_2, 1, g2]
                                    bj_x = basis_1[ie1, jl_1, 1, g1] * basis_2[ie2, jl_2, 0, g2]
                                    bj_y = basis_1[ie1, jl_1, 0, g1] * basis_2[ie2, jl_2, 1, g2]
                                    wvol = weights_1[ie1, g1] * weights_2[ie2, g2]
                                    u = lvalues_u[g1, g2]
                                    v += (1. + u**2) * (bi_x * bj_x + bi_y * bj_y) * wvol

                            matrix[p1 + i1, p2 + i2, p1 + j1 - i1, p2 + j2 - i2] += v
    # ...
#==============================================================================
def assemble_vector_ex01(ne1: 'int', ne2: 'int',
                         p1: 'int', p2: 'int',
                         spans_1: 'int[:]', spans_2: 'int[:]',
                         basis_1: 'double[:,:,:,:]', basis_2: 'double[:,:,:,:]',
                         weights_1: 'double[:,:]', weights_2: 'double[:,:]',
                         points_1: 'double[:,:]', points_2: 'double[:,:]',
                         rhs: 'double[:,:]'):
    """
    Assemble (in-place, into ``rhs``) the load vector for the source term
    ``f`` below, which corresponds to the manufactured solution
    u = sin(pi*x1)*sin(pi*x2).  ``rhs`` is indexed rhs[i1+p1, i2+p2].
    """
    from numpy import sin, cos, pi

    # quadrature points per element in each direction
    nq1 = weights_1.shape[1]
    nq2 = weights_2.shape[1]

    # assemble the right-hand side element by element
    for e1 in range(ne1):
        span1 = spans_1[e1]
        for e2 in range(ne2):
            span2 = spans_2[e2]
            for a1 in range(p1 + 1):
                for a2 in range(p2 + 1):
                    row1 = span1 - p1 + a1
                    row2 = span2 - p2 + a2
                    acc = 0.0
                    for q1 in range(nq1):
                        for q2 in range(nq2):
                            bi_0 = basis_1[e1, a1, 0, q1] * basis_2[e2, a2, 0, q2]
                            x1 = points_1[e1, q1]
                            x2 = points_2[e2, q2]
                            wvol = weights_1[e1, q1] * weights_2[e2, q2]
                            f = 2*pi**2*(sin(pi*x1)**2*sin(pi*x2)**2 + 1)*sin(pi*x1)*sin(pi*x2) - 2*pi**2*sin(pi*x1)**3*sin(pi*x2)*cos(pi*x2)**2 - 2*pi**2*sin(pi*x1)*sin(pi*x2)**3*cos(pi*x1)**2
                            acc += bi_0 * f * wvol
                    rhs[row1 + p1, row2 + p2] += acc
    # ...
#==============================================================================
def assemble_norm_ex01(ne1: 'int', ne2: 'int',
                       p1: 'int', p2: 'int',
                       spans_1: 'int[:]', spans_2: 'int[:]',
                       basis_1: 'double[:,:,:,:]', basis_2: 'double[:,:,:,:]',
                       weights_1: 'double[:,:]', weights_2: 'double[:,:]',
                       points_1: 'double[:,:]', points_2: 'double[:,:]',
                       vector_u: 'double[:,:]'):
    """
    Return the L2 norm of the error between the discrete field given by
    ``vector_u`` and the exact solution u = sin(pi*x1)*sin(pi*x2), computed
    by quadrature over all elements.
    """
    from numpy import sin, pi, sqrt, zeros

    # quadrature points per element in each direction
    nq1 = weights_1.shape[1]
    nq2 = weights_2.shape[1]

    local_coeffs = zeros((p1 + 1, p2 + 1))
    local_values = zeros((nq1, nq2))

    norm_l2 = 0.
    for e1 in range(ne1):
        span1 = spans_1[e1]
        for e2 in range(ne2):
            span2 = spans_2[e2]

            # evaluate uh on this element's quadrature grid from its local
            # coefficients
            local_values[:, :] = 0.0
            local_coeffs[:, :] = vector_u[span1: span1 + p1 + 1,
                                          span2: span2 + p2 + 1]
            for a1 in range(p1 + 1):
                for a2 in range(p2 + 1):
                    c = local_coeffs[a1, a2]
                    for q1 in range(nq1):
                        b1 = basis_1[e1, a1, 0, q1]
                        for q2 in range(nq2):
                            b2 = basis_2[e2, a2, 0, q2]
                            local_values[q1, q2] += c * b1 * b2

            # accumulate this element's contribution to the squared error
            elem_err = 0.0
            for q1 in range(nq1):
                for q2 in range(nq2):
                    wvol = weights_1[e1, q1] * weights_2[e2, q2]
                    x1 = points_1[e1, q1]
                    x2 = points_2[e2, q2]
                    u = sin(pi*x1)*sin(pi*x2)
                    uh = local_values[q1, q2]
                    elem_err += (u - uh)**2 * wvol
            norm_l2 += elem_err

    norm_l2 = sqrt(norm_l2)
    return norm_l2
| 38.543956
| 247
| 0.376907
| 938
| 7,015
| 2.608742
| 0.073561
| 0.074377
| 0.085002
| 0.031467
| 0.826727
| 0.804659
| 0.771966
| 0.765836
| 0.725787
| 0.695954
| 0
| 0.125
| 0.438917
| 7,015
| 181
| 248
| 38.756906
| 0.496697
| 0.087812
| 0
| 0.717742
| 0
| 0
| 0.062676
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.024194
| false
| 0
| 0.064516
| 0
| 0.096774
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ae47ad57e68c567abdc36e216a69e568666109a7
| 123,535
|
py
|
Python
|
Account/app/mod_authorization/view_api.py
|
TamSzaGot/mydata-sdk
|
9c8afb75077f0b993819aa534b904501a8112f76
|
[
"MIT"
] | 4
|
2018-04-21T00:46:40.000Z
|
2019-12-03T13:52:03.000Z
|
Account/app/mod_authorization/view_api.py
|
TamSzaGot/mydata-sdk
|
9c8afb75077f0b993819aa534b904501a8112f76
|
[
"MIT"
] | 1
|
2019-01-09T10:45:23.000Z
|
2019-01-09T10:45:23.000Z
|
Account/app/mod_authorization/view_api.py
|
TamSzaGot/mydata-sdk
|
9c8afb75077f0b993819aa534b904501a8112f76
|
[
"MIT"
] | 4
|
2018-04-21T01:12:12.000Z
|
2020-09-24T06:19:29.000Z
|
# -*- coding: utf-8 -*-
"""
__author__ = "Jani Yli-Kantola"
__copyright__ = ""
__credits__ = ["Harri Hirvonsalo", "Aleksi Palomäki"]
__license__ = "MIT"
__version__ = "1.3.0"
__maintainer__ = "Jani Yli-Kantola"
__contact__ = "https://github.com/HIIT/mydata-stack"
__status__ = "Development"
"""
# Import dependencies
from flask import Blueprint, request, json
from flask_restful import Resource, Api
from app.helpers import get_custom_logger, make_json_response, ApiError, validate_json, compare_str_ids, get_utc_time
from app.mod_account.controllers import verify_account_id_match
from app.mod_api_auth.controllers import requires_api_auth_user, requires_api_auth_sdk, get_user_api_key, \
get_sdk_api_key
from app.mod_authorization.schemas import schema_consent_new, schema_consent_status_new, \
schema_consent_status_signed_new
from app.mod_database.controllers import create_event_log_entry
from app.mod_database.models import ServiceLinkRecord, ConsentRecord, ConsentStatusRecord
from app.mod_authorization.controllers import sign_cr, sign_csr, store_cr_and_csr, get_auth_token_data, \
get_last_cr_status, store_csr, get_csrs, get_crs, get_cr, get_last_cr, get_csr
# Flask Blueprint plus Flask-RESTful Api wrapping the authorization endpoints.
mod_authorization_api = Blueprint('authorization_api', __name__, template_folder='templates')
api = Api(mod_authorization_api)

# Module-level logger via the project's helper.  (The previous comment,
# "create logger with 'spam_application'", was a leftover from the Python
# logging-cookbook example and did not describe this code.)
logger = get_custom_logger(__name__)

# Resources
class ApiAccountConsent(Resource):
@requires_api_auth_user
@requires_api_auth_sdk
def post(self, account_id, source_link_id, sink_link_id):
"""
Constructs Consent Record’s and Consent Status Record’s based on provided payloads for Source and Sink services.
Signs constructed record’s with Account owner’s key.
After signing records are stored.
:param account_id:
:param source_link_id:
:param sink_link_id:
:return: JSON object
"""
try:
endpoint = str(api.url_for(self, account_id=account_id, source_link_id=source_link_id, sink_link_id=sink_link_id))
except Exception as exp:
endpoint = str(__name__)
finally:
logger.info("Request to: " + str(endpoint))
logger.info("Fetching User API Key")
api_key_user = get_user_api_key(endpoint=endpoint)
logger.debug("api_key_user: " + api_key_user)
logger.info("Fetching SDK API Key")
api_key_sdk = get_sdk_api_key(endpoint=endpoint)
logger.debug("api_key_sdk: " + api_key_sdk)
try:
account_id = str(account_id)
except Exception as exp:
error_title = "Unsupported account_id"
logger.error(error_title + repr(exp))
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("account_id: " + account_id)
try:
source_link_id = str(source_link_id)
except Exception as exp:
error_title = "Unsupported source_link_id"
logger.error(error_title + repr(exp))
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("source_link_id: " + source_link_id)
try:
sink_link_id = str(sink_link_id)
except Exception as exp:
error_title = "Unsupported sink_link_id"
logger.error(error_title + repr(exp))
raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
else:
logger.info("sink_link_id: " + sink_link_id)
# Check if Account IDs from path and ApiKey are matching
if verify_account_id_match(account_id=account_id, api_key=api_key_user, endpoint=endpoint):
logger.info("Account IDs are matching")
# load JSON
json_data = request.get_json()
if not json_data:
error_detail = {'0': 'Set application/json as Content-Type', '1': 'Provide json payload'}
raise ApiError(code=400, title="No input data provided", detail=error_detail, source=endpoint)
else:
logger.debug("json_data: " + json.dumps(json_data))
# Validate payload content
validate_json(json_data, schema_consent_new)
# Get elements from payload
# Source
try:
# Consent Record
source_consent_payload = json_data['data']['source']['consent_record_payload']['attributes']
source_consent_cr_id = str(source_consent_payload['common_part']['cr_id'])
source_consent_surrogate_id = str(source_consent_payload['common_part']['surrogate_id'])
source_consent_slr_id = str(source_consent_payload['common_part']['slr_id'])
source_consent_subject_id = str(source_consent_payload['common_part']['subject_id'])
source_consent_role = str(source_consent_payload['common_part']['role'])
source_consent_rs_id = str(source_consent_payload['common_part']['rs_description']['resource_set']['rs_id'])
except Exception as exp:
error_title = "Could not Consent Record data of Source Service"
error_detail = str(exp.__class__.__name__) + " - " + str(exp.message)
logger.error(error_title + " - " + error_detail)
raise ApiError(code=400, title=error_title, detail=error_detail, source=endpoint)
else:
logger.debug("source_consent_cr_id from payload: " + source_consent_cr_id)
logger.debug("source_consent_surrogate_id from payload: " + source_consent_surrogate_id)
logger.debug("source_consent_slr_id from payload: " + source_consent_slr_id)
logger.debug("source_consent_subject_id from payload: " + source_consent_subject_id)
logger.debug("source_consent_role from payload: " + source_consent_role)
logger.debug("source_consent_rs_id from payload: " + source_consent_rs_id)
try:
# Consent Status Record
source_status_payload = json_data['data']['source']['consent_status_record_payload']['attributes']
source_status_record_id = str(source_status_payload['record_id'])
source_status_surrogate_id = str(source_status_payload['surrogate_id'])
source_status_cr_id = str(source_status_payload['cr_id'])
source_status_consent_status = str(source_status_payload['consent_status'])
source_status_iat = int(source_status_payload['iat'])
source_status_prev_record_id = str(source_status_payload['prev_record_id'])
except Exception as exp:
error_title = "Could not Consent Status Record data of Source Service"
error_detail = str(exp.__class__.__name__) + " - " + str(exp.message)
logger.error(error_title + " - " + error_detail)
raise ApiError(code=400, title=error_title, detail=error_detail, source=endpoint)
else:
logger.debug("source_status_record_id from payload: " + source_status_record_id)
logger.debug("source_status_surrogate_id from payload: " + source_status_surrogate_id)
logger.debug("source_status_cr_id from payload: " + source_status_cr_id)
logger.debug("source_status_consent_status from payload: " + source_status_consent_status)
logger.debug("source_status_iat from payload: " + str(source_status_iat))
logger.debug("source_status_prev_record_id from payload: " + source_status_prev_record_id)
try:
logger.info("Verify that Source SLR IDs from path and payload are matching")
compare_str_ids(id=source_link_id, id_to_compare=source_consent_slr_id)
except ValueError as exp:
error_title = "Source Service SLR IDs from path and Consent Record payload are not matching"
error_detail = "SLR ID from path was {} and from payload {}".format(source_link_id, source_consent_slr_id)
logger.error(error_title + " - " + error_detail + ": " + str(exp.message))
raise ApiError(code=400, title=error_title, detail=error_detail, source=endpoint)
else:
logger.info("Source Service SLR IDs from path and payload are matching")
try:
logger.info("Verify that Source Consent IDs from payload are matching")
compare_str_ids(id=source_consent_cr_id, id_to_compare=source_status_cr_id)
except ValueError as exp:
error_title = "Source Service Consent IDs from Consent Record payload and Consent Status Record payload are not matching"
error_detail = "Consent ID from Consent Record payload was {} and from Consent Status Record payload {}".format(source_consent_cr_id, source_status_cr_id)
logger.error(error_title + " - " + error_detail + ": " + str(exp.message))
raise ApiError(code=400, title=error_title, detail=error_detail, source=endpoint)
else:
logger.info("Source Service Consent IDs from payload are matching")
try:
logger.info("Verify that Source Surrogate IDs from payload are matching")
compare_str_ids(id=source_consent_surrogate_id, id_to_compare=source_status_surrogate_id)
except ValueError as exp:
error_title = "Source Service Surrogate IDs from Consent Record payload and Consent Status Record payload are not matching"
error_detail = "Surrogate ID from Consent payload was {} and from Consent Status payload {}".format(source_consent_surrogate_id, source_status_surrogate_id)
logger.error(error_title + " - " + error_detail + ": " + str(exp.message))
raise ApiError(code=400, title=error_title, detail=error_detail, source=endpoint)
else:
logger.info("Source Service Surrogate IDs from payload are matching")
# Sink
try:
# Consent Record
sink_consent_payload = json_data['data']['sink']['consent_record_payload']['attributes']
sink_consent_cr_id = str(sink_consent_payload['common_part']['cr_id'])
sink_consent_surrogate_id = str(sink_consent_payload['common_part']['surrogate_id'])
sink_consent_slr_id = str(sink_consent_payload['common_part']['slr_id'])
sink_consent_subject_id = str(sink_consent_payload['common_part']['subject_id'])
sink_consent_role = str(sink_consent_payload['common_part']['role'])
sink_consent_rs_id = str(sink_consent_payload['common_part']['rs_description']['resource_set']['rs_id'])
except Exception as exp:
error_title = "Could not get Consent Record data of Sink Service"
error_detail = str(exp.__class__.__name__) + " - " + str(exp.message)
logger.error(error_title + " - " + error_detail)
raise ApiError(code=400, title=error_title, detail=error_detail, source=endpoint)
else:
logger.debug("sink_consent_cr_id from payload: " + sink_consent_cr_id)
logger.debug("sink_consent_surrogate_id from payload: " + sink_consent_surrogate_id)
logger.debug("sink_consent_slr_id from payload: " + sink_consent_slr_id)
logger.debug("sink_consent_subject_id from payload: " + sink_consent_subject_id)
logger.debug("sink_consent_role from payload: " + sink_consent_role)
logger.debug("sink_consent_rs_id from payload: " + sink_consent_rs_id)
try:
# Consent Status Record
sink_status_payload = json_data['data']['sink']['consent_status_record_payload']['attributes']
sink_status_record_id = str(sink_status_payload['record_id'])
sink_status_surrogate_id = str(sink_status_payload['surrogate_id'])
sink_status_cr_id = str(sink_status_payload['cr_id'])
sink_status_consent_status = str(sink_status_payload['consent_status'])
sink_status_iat = int(sink_status_payload['iat'])
sink_status_prev_record_id = str(sink_status_payload['prev_record_id'])
except Exception as exp:
error_title = "Could not get Consent Status Record data of Sink Service"
error_detail = str(exp.__class__.__name__) + " - " + str(exp.message)
logger.error(error_title + " - " + error_detail)
raise ApiError(code=400, title=error_title, detail=error_detail, source=endpoint)
else:
logger.debug("sink_status_record_id from payload: " + sink_status_record_id)
logger.debug("sink_status_surrogate_id from payload: " + sink_status_surrogate_id)
logger.debug("sink_status_cr_id from payload: " + sink_status_cr_id)
logger.debug("sink_status_consent_status from payload: " + sink_status_consent_status)
logger.debug("sink_status_iat from payload: " + str(sink_status_iat))
logger.debug("sink_status_prev_record_id from payload: " + sink_status_prev_record_id)
try:
logger.info("Verify that Sink Service SLR IDs from path and payload are matching")
compare_str_ids(id=sink_link_id, id_to_compare=sink_consent_slr_id)
except ValueError as exp:
error_title = "Sink Service SLR IDs from path and Consent Record payload are not matching"
error_detail = "SLR ID from path was {} and from payload {}".format(sink_link_id, sink_consent_slr_id)
logger.error(error_title + " - " + error_detail + ": " + str(exp.message))
raise ApiError(code=400, title=error_title, detail=error_detail, source=endpoint)
else:
logger.info("Sink Service SLR IDs from path and payload are matching")
try:
logger.info("Verify that Sink Consent IDs from payload are matching")
compare_str_ids(id=sink_consent_cr_id, id_to_compare=sink_status_cr_id)
except ValueError as exp:
error_title = "Sink Service Consent IDs from Consent Record payload and Consent Status Record payload are not matching"
error_detail = "Consent ID from Consent payload was {} and from Consent Status payload {}".format(sink_consent_cr_id, sink_status_cr_id)
logger.error(error_title + " - " + error_detail + ": " + str(exp.message))
raise ApiError(code=400, title=error_title, detail=error_detail, source=endpoint)
else:
logger.info("Sink Service Consent IDs from payload are matching")
try:
logger.info("Verify that Sink Surrogate IDs from payload are matching")
compare_str_ids(id=sink_consent_surrogate_id, id_to_compare=sink_status_surrogate_id)
except ValueError as exp:
error_title = "Sink Service Surrogate IDs from Consent Record payload and Consent Status Record payload are not matching"
error_detail = "Surrogate ID from Consent payload was {} and from Consent Status payload {}".format(sink_consent_surrogate_id, sink_status_surrogate_id)
logger.error(error_title + " - " + error_detail + ": " + str(exp.message))
raise ApiError(code=400, title=error_title, detail=error_detail, source=endpoint)
else:
logger.info("Sink Service Surrogate IDs from payload are matching")
######
# Sign Consent Records and Consent Status Records
####
try:
logger.info("Signing Consent Record of Source Service")
source_cr_signed = sign_cr(account_id=account_id, payload=source_consent_payload, endpoint=endpoint)
except Exception as exp:
error_title = "Could not sign Consent Record of Source Service"
error_detail = str(exp.message)
logger.error(error_title + " - " + error_detail)
raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
else:
logger.info("Consent Record of Source Service signed")
logger.debug("Consent Record of Source Service: " + json.dumps(source_cr_signed))
try:
logger.info("Signing Consent Status Record of Source Service")
source_csr_signed = sign_csr(account_id=account_id, payload=source_status_payload, endpoint=endpoint)
except Exception as exp:
error_title = "Could not sign Consent Status Record of Source Service"
error_detail = str(exp.message)
logger.error(error_title + " - " + error_detail)
raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
else:
logger.info("Consent Status Record of Source Service signed")
logger.debug("Consent Status Record of Source Service: " + json.dumps(source_csr_signed))
try:
logger.info("Signing Consent Record of Sink Service")
sink_cr_signed = sign_cr(account_id=account_id, payload=sink_consent_payload, endpoint=endpoint)
except Exception as exp:
error_title = "Could not sign Consent Record of Sink Service"
error_detail = str(exp.message)
logger.error(error_title + " - " + error_detail)
raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
else:
logger.info("Consent Record of Sink Service")
logger.debug("Consent Record of Sink Service: " + json.dumps(sink_cr_signed))
try:
logger.info("Signing Consent Status Record of Sink Service")
sink_csr_signed = sign_csr(account_id=account_id, payload=sink_status_payload, endpoint=endpoint)
except Exception as exp:
error_title = "Could not sign Consent Status Record of Sink Service"
error_detail = str(exp.message)
logger.error(error_title + " - " + error_detail)
raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
else:
logger.info("Sink's Consent Status Record of Sink Service")
logger.debug("Consent Status Record of Sink Service: " + json.dumps(sink_csr_signed))
#########
# Store #
#########
logger.info("Creating objects to store")
# Service Link Record of Source Service
try:
logger.info("Creating Service Link Record of Source Service")
source_slr_entry = ServiceLinkRecord(
surrogate_id=source_consent_surrogate_id,
account_id=account_id,
service_link_record_id=source_consent_slr_id
)
except Exception as exp:
error_title = "Failed to create Source's Service Link Record object"
error_detail = str(exp.message)
logger.error(error_title + " - " + error_detail)
raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
else:
logger.info("Source's Service Link Record object created")
logger.info("Source's Service Link Record object: " + source_slr_entry.log_entry)
# Service Link Record of Sink Service
try:
logger.info("Creating Source's Service Link Record object")
sink_slr_entry = ServiceLinkRecord(
surrogate_id=sink_consent_surrogate_id,
account_id=account_id,
service_link_record_id=sink_consent_slr_id
)
except Exception as exp:
error_title = "Failed to create Sink's Service Link Record object"
error_detail = str(exp.message)
logger.error(error_title + " - " + error_detail)
raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
else:
logger.info("Source's Service Link Record object created")
logger.info("Source's Service Link Record object: " + sink_slr_entry.log_entry)
# Consent Record of Source Service
try:
logger.info("Creating Consent Record of Source Service")
source_cr_entry = ConsentRecord(
consent_record=source_cr_signed,
consent_id=source_consent_cr_id,
surrogate_id=source_consent_surrogate_id,
resource_set_id=source_consent_rs_id,
service_link_record_id=source_consent_slr_id,
subject_id=source_consent_subject_id,
role=source_consent_role,
consent_pair_id=sink_consent_cr_id,
accounts_id=account_id
)
except Exception as exp:
error_title = "Failed to create Source's Consent Record object"
error_detail = str(exp.message)
logger.error(error_title + " - " + error_detail)
raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
else:
logger.info("Consent Record of Source Service created")
logger.info("Consent Record of Source Service: " + source_cr_entry.log_entry)
# Consent Record of Sink Service
try:
logger.info("Creating Consent Record of Sink Service")
sink_cr_entry = ConsentRecord(
consent_record=sink_cr_signed,
consent_id=sink_consent_cr_id,
surrogate_id=sink_consent_surrogate_id,
resource_set_id=sink_consent_rs_id,
service_link_record_id=sink_consent_slr_id,
subject_id=sink_consent_subject_id,
role=sink_consent_role,
consent_pair_id=source_consent_cr_id,
accounts_id=account_id
)
except Exception as exp:
error_title = "Failed to create Sink's Consent Record object"
error_detail = str(exp.message)
logger.error(error_title + " - " + error_detail)
raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
else:
logger.info("Consent Record of Sink Service created")
logger.info("Consent Record of Sink Service: " + sink_cr_entry.log_entry)
# Consent Status Record of Source Service
try:
logger.info("Creating Consent Status Record of Source Service")
source_csr_entry = ConsentStatusRecord(
consent_status_record_id=source_status_record_id,
status=sink_status_consent_status,
consent_status_record=source_csr_signed,
consent_record_id=source_status_cr_id,
issued_at=source_status_iat,
prev_record_id=source_status_prev_record_id,
accounts_id=account_id
)
except Exception as exp:
error_title = "Failed to create Source's Consent Status Record object"
error_detail = str(exp.message)
logger.error(error_title + " - " + error_detail)
raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
else:
logger.info("Consent Status Record of Source Service created")
logger.info("Consent Status Record of Source Service: " + source_csr_entry.log_entry)
# Consent Status Record of Sink Service
try:
logger.info("Creating Consent Status Record of Sink Service")
sink_csr_entry = ConsentStatusRecord(
consent_status_record_id=sink_status_record_id,
status=sink_status_consent_status,
consent_status_record=sink_csr_signed,
consent_record_id=sink_status_cr_id,
issued_at=sink_status_iat,
prev_record_id=sink_status_prev_record_id,
accounts_id=account_id
)
except Exception as exp:
error_title = "Failed to create Sink's Consent Status Record object"
error_detail = str(exp.message)
logger.error(error_title + " - " + error_detail)
raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
else:
logger.info("Consent Status Record of Sink Service created")
logger.info("Consent Status Record of Sink Service: " + sink_csr_entry.log_entry)
# Store Consent Records and Consent Status Records
try:
logger.info("About to store Consent Records and Consent Status Records")
stored_source_cr_entry, stored_source_csr_entry, stored_sink_cr_entry, stored_sink_csr_entry = \
store_cr_and_csr(
source_slr_entry=source_slr_entry,
sink_slr_entry=sink_slr_entry,
source_cr_entry=source_cr_entry,
source_csr_entry=source_csr_entry,
sink_cr_entry=sink_cr_entry,
sink_csr_entry=sink_csr_entry,
endpoint=endpoint
)
except IndexError as exp:
error_title = "Could not store Consent Records and Consent Status Records"
error_detail = str(exp.message)
logger.error(error_title + " - " + repr(exp))
raise ApiError(code=400, title=error_title, detail=error_detail, source=endpoint)
except KeyError as exp:
error_title = "Could not store Consent Records and Consent Status Records"
error_detail = str(exp.message)
logger.error(error_title + " - " + repr(exp))
raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
except RuntimeError as exp:
error_title = "Could not store Consent Records and Consent Status Records"
error_detail = str(exp.message)
logger.error(error_title + " - " + repr(exp))
raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
except Exception as exp:
error_title = "Could not store Consent Records and Consent Status Records"
error_detail = str(exp.message)
logger.error(error_title + " - " + repr(exp))
raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
else:
logger.info("Stored Consent Record and Consent Status Record")
logger.info("Source CR: " + stored_source_cr_entry.log_entry)
logger.info("Source CSR: " + stored_source_csr_entry.log_entry)
logger.info("Sink CR: " + stored_sink_cr_entry.log_entry)
logger.info("Sink CSR: " + stored_sink_csr_entry.log_entry)
# Response data container
try:
response_data = {}
response_data['data'] = {}
response_data['data']['source'] = {}
response_data['data']['source']['consent_record'] = stored_source_cr_entry.to_api_dict
response_data['data']['source']['consent_status_record'] = stored_source_csr_entry.to_api_dict
response_data['data']['sink'] = {}
response_data['data']['sink']['consent_record'] = stored_sink_cr_entry.to_api_dict
response_data['data']['sink']['consent_status_record'] = stored_sink_csr_entry.to_api_dict
except Exception as exp:
logger.error('Could not prepare response data: ' + repr(exp))
raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
else:
logger.info('Response data ready')
logger.debug('response_data: ' + json.dumps(response_data))
create_event_log_entry(
account_id=account_id,
actor="AccountOwner",
action="POST",
resource=endpoint,
timestamp=get_utc_time()
)
response_data_dict = dict(response_data)
logger.debug('response_data_dict: ' + json.dumps(response_data_dict))
return make_json_response(data=response_data_dict, status_code=201)
class ApiAccountConsentStatus(Resource):
    @requires_api_auth_user
    @requires_api_auth_sdk
    def post(self, account_id, consent_id):
        """
        Sign and store a new Consent Status Record for an existing Consent Record.

        :param account_id: ID of the Account that owns the Consent Record
        :param consent_id: ID of the Consent Record the new status belongs to
        :return: JSON response (HTTP 201) containing the stored Consent Status Record
        :raises ApiError: 400 on bad input / ID mismatch, 500 on signing or storage failure
        """
        try:
            endpoint = str(api.url_for(self, account_id=account_id, consent_id=consent_id))
        except Exception as exp:
            # Fall back to the module name so logging and ApiError sources stay usable
            endpoint = str(__name__)
        finally:
            logger.info("Request to: " + str(endpoint))

        logger.info("Fetching User API Key")
        api_key_user = get_user_api_key(endpoint=endpoint)
        logger.debug("api_key_user: " + api_key_user)
        logger.info("Fetching SDK API Key")
        api_key_sdk = get_sdk_api_key(endpoint=endpoint)
        logger.debug("api_key_sdk: " + api_key_sdk)

        # Check path variables
        try:
            account_id = str(account_id)
        except Exception as exp:
            error_title = "Unsupported account_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("account_id: " + account_id)
        try:
            consent_id = str(consent_id)
        except Exception as exp:
            error_title = "Unsupported consent_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("consent_id: " + consent_id)

        # Check if Account IDs from path and ApiKey are matching
        if verify_account_id_match(account_id=account_id, api_key=api_key_user, endpoint=endpoint):
            logger.info("Account IDs are matching")

        # Load JSON payload
        json_data = request.get_json()
        if not json_data:
            error_detail = {'0': 'Set application/json as Content-Type', '1': 'Provide json payload'}
            raise ApiError(code=400, title="No input data provided", detail=error_detail, source=endpoint)
        else:
            logger.debug("json_data: " + json.dumps(json_data))

        # Validate payload content against the Consent Status schema
        validate_json(json_data, schema_consent_status_new)

        # Get elements from payload
        try:
            # Consent Status Record attributes
            status_payload = json_data['data']['attributes']
            record_id = str(status_payload['record_id'])
            surrogate_id = str(status_payload['surrogate_id'])
            cr_id = str(status_payload['cr_id'])
            consent_status = str(status_payload['consent_status'])
            iat = int(status_payload['iat'])
            prev_record_id = str(status_payload['prev_record_id'])
        except Exception as exp:
            # FIX: title previously read "Could not Consent Status Record data" (missing verb)
            error_title = "Could not fetch Consent Status Record data from payload"
            # FIX: str(exp) instead of Python-2-only exp.message, which raises
            # AttributeError on Python 3 and masks the original error
            error_detail = str(exp.__class__.__name__) + " - " + str(exp)
            logger.error(error_title + " - " + error_detail)
            raise ApiError(code=400, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.debug("record_id from payload: " + record_id)
            logger.debug("surrogate_id from payload: " + surrogate_id)
            logger.debug("cr_id from payload: " + cr_id)
            logger.debug("consent_status from payload: " + consent_status)
            logger.debug("iat from payload: " + str(iat))
            logger.debug("prev_record_id from payload: " + prev_record_id)

        # Check that Consent IDs from path and payload are matching
        try:
            logger.info("Verify that Consent IDs from path and payload are matching")
            compare_str_ids(id=consent_id, id_to_compare=cr_id)
        except ValueError as exp:
            error_title = "Consent IDs from path and payload are not matching"
            error_detail = "Consent ID from path was {} and from payload {}".format(consent_id, cr_id)
            logger.error(error_title + " - " + error_detail + ": " + str(exp))
            raise ApiError(code=400, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.info("Consent IDs from path and payload are matching")

        ######
        # Sign Consent Status Record
        ####
        try:
            logger.info("Signing Consent Status Record")
            csr_signed = sign_csr(account_id=account_id, payload=status_payload, endpoint=endpoint)
        except Exception as exp:
            error_title = "Could not sign Consent Status Record"
            error_detail = str(exp)
            logger.error(error_title + " - " + error_detail)
            raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.info("Consent Status Record signed")
            logger.debug("Consent Status Record: " + json.dumps(csr_signed))

        #
        # Store Consent Status Record
        try:
            csr_object = store_csr(
                account_id=account_id,
                record_id=record_id,
                cr_id=consent_id,
                surrogate_id=surrogate_id,
                consent_status=consent_status,
                iat=iat,
                prev_record_id=prev_record_id,
                csr_signed=csr_signed,
                endpoint=endpoint
            )
        except IndexError as exp:
            # Missing prerequisite database entries -> client error
            error_title = "Could not store Consent Status Record"
            error_detail = str(exp)
            logger.error(error_title + " - " + repr(exp))
            raise ApiError(code=400, title=error_title, detail=error_detail, source=endpoint)
        except KeyError as exp:
            error_title = "Could not store Consent Status Record"
            error_detail = str(exp)
            logger.error(error_title + " - " + repr(exp))
            raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
        except RuntimeError as exp:
            error_title = "Could not store Consent Status Record"
            error_detail = str(exp)
            logger.error(error_title + " - " + repr(exp))
            raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
        except Exception as exp:
            error_title = "Could not store Consent Status Record"
            error_detail = str(exp)
            logger.error(error_title + " - " + repr(exp))
            raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.debug("Stored Consent Status Record: " + csr_object.log_entry)

        # Response data container
        try:
            response_data = {}
            response_data['data'] = csr_object.to_api_dict
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + json.dumps(response_data))

        create_event_log_entry(
            account_id=account_id,
            actor="AccountOwner",
            action="POST",
            resource=endpoint,
            timestamp=get_utc_time()
        )

        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + json.dumps(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=201)
class ApiAccountConsentStatusSigned(Resource):
    @requires_api_auth_sdk
    def post(self, account_id, consent_id):
        """
        Store an externally signed Consent Status Record for an existing Consent Record.

        Unlike ApiAccountConsentStatus, the Consent Status Record arrives already
        signed in the payload ('data.csr'), so no signing step is performed here.

        :param account_id: ID of the Account that owns the Consent Record
        :param consent_id: ID of the Consent Record the new status belongs to
        :return: JSON response (HTTP 201) containing the stored Consent Status Record
        :raises ApiError: 400 on bad input / ID mismatch, 500 on storage failure
        """
        try:
            endpoint = str(api.url_for(self, account_id=account_id, consent_id=consent_id))
        except Exception as exp:
            # Fall back to the module name so logging and ApiError sources stay usable
            endpoint = str(__name__)
        finally:
            logger.info("Request to: " + str(endpoint))

        # NOTE(review): only @requires_api_auth_sdk decorates this method, yet a
        # User API Key is fetched and used for the account match below — confirm
        # that get_user_api_key() behaves as intended without @requires_api_auth_user.
        logger.info("Fetching User API Key")
        api_key_user = get_user_api_key(endpoint=endpoint)
        logger.debug("api_key_user: " + api_key_user)
        logger.info("Fetching SDK API Key")
        api_key_sdk = get_sdk_api_key(endpoint=endpoint)
        logger.debug("api_key_sdk: " + api_key_sdk)

        # Check path variables
        try:
            account_id = str(account_id)
        except Exception as exp:
            error_title = "Unsupported account_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("account_id: " + account_id)
        try:
            consent_id = str(consent_id)
        except Exception as exp:
            error_title = "Unsupported consent_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("consent_id: " + consent_id)

        # Check if Account IDs from path and ApiKey are matching
        if verify_account_id_match(account_id=account_id, api_key=api_key_user, endpoint=endpoint):
            logger.info("Account IDs are matching")

        # Load JSON payload
        json_data = request.get_json()
        if not json_data:
            error_detail = {'0': 'Set application/json as Content-Type', '1': 'Provide json payload'}
            raise ApiError(code=400, title="No input data provided", detail=error_detail, source=endpoint)
        else:
            logger.debug("json_data: " + json.dumps(json_data))

        # Validate payload content against the signed Consent Status schema
        validate_json(json_data, schema_consent_status_signed_new)

        # Get elements from payload
        try:
            # Signed Consent Status Record and its plain payload
            status_record_id = json_data['data']['csr']['id']
            status_record = json_data['data']['csr']['attributes']
            status_payload = json_data['data']['csr_payload']['attributes']
            record_id = str(status_payload['record_id'])
            surrogate_id = str(status_payload['surrogate_id'])
            cr_id = str(status_payload['cr_id'])
            consent_status = str(status_payload['consent_status'])
            iat = int(status_payload['iat'])
            prev_record_id = str(status_payload['prev_record_id'])
        except Exception as exp:
            # FIX: title previously read "Could not Consent Status Record data" (missing verb)
            error_title = "Could not fetch Consent Status Record data from payload"
            # FIX: str(exp) instead of Python-2-only exp.message, which raises
            # AttributeError on Python 3 and masks the original error
            error_detail = str(exp.__class__.__name__) + " - " + str(exp)
            logger.error(error_title + " - " + error_detail)
            raise ApiError(code=400, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.debug("status_record from payload: " + json.dumps(status_record))
            logger.debug("record_id from payload: " + record_id)
            logger.debug("surrogate_id from payload: " + surrogate_id)
            logger.debug("cr_id from payload: " + cr_id)
            logger.debug("consent_status from payload: " + consent_status)
            logger.debug("iat from payload: " + str(iat))
            logger.debug("prev_record_id from payload: " + prev_record_id)

        # Check that Consent IDs from path and payload are matching
        try:
            logger.info("Verify that Consent IDs from path and payload are matching")
            compare_str_ids(id=consent_id, id_to_compare=cr_id)
        except ValueError as exp:
            error_title = "Consent IDs from path and payload are not matching"
            error_detail = "Consent ID from path was {} and from payload {}".format(consent_id, cr_id)
            logger.error(error_title + " - " + error_detail + ": " + str(exp))
            raise ApiError(code=400, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.info("Consent IDs from path and payload are matching")

        # Check that the signed record's ID matches the ID inside its payload
        try:
            logger.info("Verify that Consent Status Record IDs in payload are matching")
            compare_str_ids(id=status_record_id, id_to_compare=record_id)
        except ValueError as exp:
            error_title = "Consent Status Record IDs in Consent Status Record and Consent Status Record payload are not matching"
            error_detail = "Consent Status Record ID from Consent Status Record was {} and from Consent Status Record payload {}".format(status_record_id, record_id)
            logger.error(error_title + " - " + error_detail + ": " + str(exp))
            raise ApiError(code=400, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.info("Consent Status Record IDs in Consent Status Record and Consent Status Record payload are matching")

        #
        # Store Consent Status Record
        try:
            csr_object = store_csr(
                account_id=account_id,
                record_id=record_id,
                cr_id=consent_id,
                surrogate_id=surrogate_id,
                consent_status=consent_status,
                iat=iat,
                prev_record_id=prev_record_id,
                csr_signed=status_record,
                endpoint=endpoint
            )
        except IndexError as exp:
            # Missing prerequisite database entries -> client error
            error_title = "Could not store Consent Status Record"
            error_detail = str(exp)
            logger.error(error_title + " - " + repr(exp))
            raise ApiError(code=400, title=error_title, detail=error_detail, source=endpoint)
        except KeyError as exp:
            error_title = "Could not store Consent Status Record"
            error_detail = str(exp)
            logger.error(error_title + " - " + repr(exp))
            raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
        except RuntimeError as exp:
            error_title = "Could not store Consent Status Record"
            error_detail = str(exp)
            logger.error(error_title + " - " + repr(exp))
            raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
        except Exception as exp:
            error_title = "Could not store Consent Status Record"
            error_detail = str(exp)
            logger.error(error_title + " - " + repr(exp))
            raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.debug("Stored Consent Status Record: " + csr_object.log_entry)

        # Response data container
        try:
            response_data = {}
            response_data['data'] = csr_object.to_api_dict
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + json.dumps(response_data))

        create_event_log_entry(
            account_id=account_id,
            actor="AccountOwner",
            action="POST",
            resource=endpoint,
            timestamp=get_utc_time()
        )

        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + json.dumps(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=201)
class ApiConsentsForServiceLinkRecord(Resource):
    @requires_api_auth_user
    @requires_api_auth_sdk
    def get(self, account_id, link_id):
        """
        List Consent Records related to a Service Link Record.

        :param account_id: ID of the Account that owns the Service Link Record
        :param link_id: ID of the Service Link Record
        :return: JSON response (HTTP 200) with an array of Consent Records;
                 consent pairs are included when the 'get_consent_pair' query
                 parameter is truthy
        :raises ApiError: 400 on bad input, 404 when no records are found
        """
        try:
            endpoint = str(api.url_for(self, account_id=account_id, link_id=link_id))
        except Exception as exp:
            # Fall back to the module name so logging and ApiError sources stay usable
            endpoint = str(__name__)
        finally:
            logger.info("Request to: " + str(endpoint))

        logger.info("Fetching User API Key")
        api_key_user = get_user_api_key(endpoint=endpoint)
        logger.debug("api_key_user: " + api_key_user)
        logger.info("Fetching SDK API Key")
        api_key_sdk = get_sdk_api_key(endpoint=endpoint)
        logger.debug("api_key_sdk: " + api_key_sdk)

        # Check path variables
        try:
            account_id = str(account_id)
        except Exception as exp:
            error_title = "Unsupported account_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        try:
            link_id = str(link_id)
        except Exception as exp:
            error_title = "Unsupported link_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)

        # Check if Account IDs from path and ApiKey are matching
        if verify_account_id_match(account_id=account_id, api_key=api_key_user, endpoint=endpoint):
            logger.info("Account IDs are matching")

        # Check query variables
        try:
            # FIX: comparison was case-sensitive ("True" only); accept any casing
            # of "true", anything else (including a missing parameter) means False.
            get_consent_pair = str(request.args.get('get_consent_pair', False)).lower() == "true"
        except Exception as exp:
            raise ApiError(code=400, title="Unsupported get_consent_pair", detail=repr(exp), source=endpoint)
        else:
            logger.info("get_consent_pair from query params: {}".format(get_consent_pair))

        # Get Consent Records
        try:
            logger.info("Fetching ConsentRecords")
            # account_id_list is returned by get_crs but not needed here
            db_entries, account_id_list = get_crs(slr_id=link_id, account_id=account_id, consent_pairs=get_consent_pair)
        except IndexError as exp:
            error_title = "Consent Record not found with provided information"
            error_detail = "Account ID was {} and Service Link ID was {}".format(account_id, link_id)
            logger.error(error_title + " - " + error_detail + ": " + repr(exp))
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        except Exception as exp:
            error_title = "No ConsentRecords found"
            error_detail = repr(exp)
            logger.error(error_title + " - " + error_detail)
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.info("ConsentRecords Fetched")
            logger.debug("ConsentRecords: " + json.dumps(db_entries))

        # Response data container
        try:
            db_entry_list = db_entries
            response_data = {}
            response_data['data'] = db_entry_list
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + repr(response_data))

        create_event_log_entry(
            account_id=account_id,
            actor="AccountOwner",
            action="GET",
            resource=endpoint,
            timestamp=get_utc_time()
        )

        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + repr(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=200)
class ApiConsentForServiceLinkRecord(Resource):
    @requires_api_auth_user
    @requires_api_auth_sdk
    def get(self, account_id, link_id, consent_id):
        """
        Fetch one Consent Record related to a Service Link Record, optionally
        together with its consent pair.

        :param account_id: ID of the Account that owns the Service Link Record
        :param link_id: ID of the Service Link Record
        :param consent_id: ID of the Consent Record to fetch
        :return: JSON response (HTTP 200) with an array holding the Consent
                 Record and, when requested via 'get_consent_pair', its pair
        :raises ApiError: 400 on bad input, 404 when not found, 500 on fetch failure
        """
        try:
            endpoint = str(api.url_for(self, account_id=account_id, link_id=link_id, consent_id=consent_id))
        except Exception as exp:
            # Fall back to the module name so logging and ApiError sources stay usable
            endpoint = str(__name__)
        finally:
            logger.info("Request to: " + str(endpoint))

        logger.info("Fetching User API Key")
        api_key_user = get_user_api_key(endpoint=endpoint)
        logger.debug("api_key_user: " + api_key_user)
        logger.info("Fetching SDK API Key")
        api_key_sdk = get_sdk_api_key(endpoint=endpoint)
        logger.debug("api_key_sdk: " + api_key_sdk)

        # Check path variables
        try:
            account_id = str(account_id)
        except Exception as exp:
            error_title = "Unsupported account_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        try:
            link_id = str(link_id)
        except Exception as exp:
            error_title = "Unsupported link_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        try:
            consent_id = str(consent_id)
        except Exception as exp:
            error_title = "Unsupported consent_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)

        # Check if Account IDs from path and ApiKey are matching
        if verify_account_id_match(account_id=account_id, api_key=api_key_user, endpoint=endpoint):
            logger.info("Account IDs are matching")

        # Check query variables
        try:
            # FIX: comparison was case-sensitive ("True" only); accept any casing
            # of "true", anything else (including a missing parameter) means False.
            get_consent_pair = str(request.args.get('get_consent_pair', False)).lower() == "true"
        except Exception as exp:
            raise ApiError(code=400, title="Unsupported get_consent_pair", detail=repr(exp), source=endpoint)
        else:
            logger.info("get_consent_pair from query params: {}".format(get_consent_pair))

        # Get Consent Record
        cr_array = []
        try:
            logger.info("Fetching Consent Record")
            cr_entry, account_id_from_db = get_cr(cr_id=consent_id, slr_id=link_id, account_id=account_id)
            cr_array.append(cr_entry)
        except IndexError as exp:
            error_title = "Consent Record not found with provided information"
            error_detail = "Account ID was {}, Service Link ID was {} and Consent ID was {}.".format(account_id, link_id, consent_id)
            logger.error(error_title + " - " + error_detail + ": " + repr(exp))
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        except Exception as exp:
            error_title = "No Consent Record found"
            error_detail = repr(exp)
            logger.error(error_title + " - " + error_detail)
            raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.info("Consent Record Fetched")

        # Get Consent Pair of the Consent Record if requested
        if get_consent_pair:
            try:
                logger.info("Fetching Consent Pair")
                cr_entry, account_id_from_db = get_cr(account_id=account_id, consent_pair_id=consent_id)
                cr_array.append(cr_entry)
            except IndexError as exp:
                error_title = "Consent Pair not found with provided information"
                error_detail = "Account ID was {}, Service Link ID was {} and Consent Pair ID was {}.".format(account_id, link_id, consent_id)
                logger.error(error_title + " - " + error_detail + ": " + repr(exp))
                raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
            except Exception as exp:
                error_title = "No Consent Pair found"
                error_detail = repr(exp)
                logger.error(error_title + " - " + error_detail)
                raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
            else:
                logger.info("Consent Pair Fetched")

        # Response data container
        try:
            response_data = {}
            response_data['data'] = cr_array
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + repr(response_data))

        create_event_log_entry(
            account_id=account_id,
            actor="AccountOwner",
            action="GET",
            resource=endpoint,
            timestamp=get_utc_time()
        )

        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + repr(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=200)
class ApiLastConsentForServiceLinkRecord(Resource):
    @requires_api_auth_user
    @requires_api_auth_sdk
    def get(self, account_id, link_id):
        """
        Fetch the last (most recent) Consent Record related to a Service Link Record.

        :param account_id: ID of the Account that owns the Service Link Record
        :param link_id: ID of the Service Link Record
        :return: JSON response (HTTP 200) with the Consent Record array; consent
                 pairs are included when 'get_consent_pair' query parameter is truthy
        :raises ApiError: 400 on bad input, 404 when no records are found
        """
        try:
            endpoint = str(api.url_for(self, account_id=account_id, link_id=link_id))
        except Exception as exp:
            # Fall back to the module name so logging and ApiError sources stay usable
            endpoint = str(__name__)
        finally:
            logger.info("Request to: " + str(endpoint))

        logger.info("Fetching User API Key")
        api_key_user = get_user_api_key(endpoint=endpoint)
        logger.debug("api_key_user: " + api_key_user)
        logger.info("Fetching SDK API Key")
        api_key_sdk = get_sdk_api_key(endpoint=endpoint)
        logger.debug("api_key_sdk: " + api_key_sdk)

        # Check path variables
        try:
            account_id = str(account_id)
        except Exception as exp:
            error_title = "Unsupported account_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        try:
            link_id = str(link_id)
        except Exception as exp:
            error_title = "Unsupported link_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)

        # Check if Account IDs from path and ApiKey are matching
        if verify_account_id_match(account_id=account_id, api_key=api_key_user, endpoint=endpoint):
            logger.info("Account IDs are matching")

        # Check query variables
        try:
            # FIX: comparison was case-sensitive ("True" only); accept any casing
            # of "true", anything else (including a missing parameter) means False.
            get_consent_pair = str(request.args.get('get_consent_pair', False)).lower() == "true"
        except Exception as exp:
            raise ApiError(code=400, title="Unsupported get_consent_pair", detail=repr(exp), source=endpoint)
        else:
            logger.info("get_consent_pair from query params: {}".format(get_consent_pair))

        # Get last Consent Record
        try:
            logger.info("Fetching ConsentRecords")
            cr_array = get_last_cr(slr_id=link_id, account_id=account_id, consent_pairs=get_consent_pair)
        except IndexError as exp:
            error_title = "Consent Record not found with provided information"
            error_detail = "Account ID was {} and Service Link ID was {}".format(account_id, link_id)
            logger.error(error_title + " - " + error_detail + ": " + repr(exp))
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        except Exception as exp:
            error_title = "No ConsentRecords found"
            error_detail = repr(exp)
            logger.error(error_title + " - " + error_detail)
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.info("ConsentRecords Fetched")
            logger.debug("ConsentRecords: " + json.dumps(cr_array))

        # Response data container
        try:
            response_data = {}
            response_data['data'] = cr_array
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + repr(response_data))

        create_event_log_entry(
            account_id=account_id,
            actor="AccountOwner",
            action="GET",
            resource=endpoint,
            timestamp=get_utc_time()
        )

        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + repr(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=200)
class ApiConsentStatusesForServiceLinkRecord(Resource):
    @requires_api_auth_user
    @requires_api_auth_sdk
    def get(self, account_id, link_id, consent_id):
        """
        Fetch the list of Consent Status Records for a Consent Record.

        :param account_id: ID of the Account that owns the Service Link Record
        :param link_id: ID of the Service Link Record
        :param consent_id: ID of the Consent Record whose statuses are listed
        :return: JSON response (HTTP 200) with an array of Consent Status Records,
                 optionally filtered by the 'status_id' query parameter
        :raises ApiError: 400 on bad input, 404 when not found, 500 on fetch failure
        """
        try:
            endpoint = str(api.url_for(self, account_id=account_id, link_id=link_id, consent_id=consent_id))
        except Exception as exp:
            # Fall back to the module name so logging and ApiError sources stay usable
            endpoint = str(__name__)
        finally:
            logger.info("Request to: " + str(endpoint))

        logger.info("Fetching User API Key")
        api_key_user = get_user_api_key(endpoint=endpoint)
        logger.debug("api_key_user: " + api_key_user)
        logger.info("Fetching SDK API Key")
        api_key_sdk = get_sdk_api_key(endpoint=endpoint)
        logger.debug("api_key_sdk: " + api_key_sdk)

        # Check path variables
        try:
            account_id = str(account_id)
        except Exception as exp:
            error_title = "Unsupported account_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("account_id: " + account_id)
        try:
            link_id = str(link_id)
        except Exception as exp:
            error_title = "Unsupported link_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("link_id: " + link_id)
        try:
            consent_id = str(consent_id)
        except Exception as exp:
            error_title = "Unsupported consent_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("consent_id: " + consent_id)

        # Check if Account IDs from path and ApiKey are matching
        if verify_account_id_match(account_id=account_id, api_key=api_key_user, endpoint=endpoint):
            logger.info("Account IDs are matching")

        # Check query variables; empty status_id means "no filtering"
        try:
            status_id = str(request.args.get('status_id', ""))
        except Exception as exp:
            raise ApiError(code=400, title="Unsupported status_id", detail=repr(exp), source=endpoint)
        else:
            logger.info("status_id from query params: {}".format(status_id))

        # Get Consent Record; fetched only to verify the Consent Record exists
        # for this Account and Service Link before listing its statuses
        try:
            logger.info("Fetching Consent Record")
            cr, account_id_from_db = get_cr(cr_id=consent_id, slr_id=link_id, account_id=account_id)
        except IndexError as exp:
            error_title = "Consent Record not found with provided information"
            error_detail = "Account ID was {}, Service Link ID was {} and Consent ID was {}.".format(account_id, link_id, consent_id)
            logger.error(error_title + " - " + error_detail + ": " + repr(exp))
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        except Exception as exp:
            error_title = "No Consent Record found"
            error_detail = repr(exp)
            logger.error(error_title + " - " + error_detail)
            raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.info("Consent Record Fetched")

        # Get Consent Status Records
        try:
            logger.info("Fetching Consent Status Records")
            # account_id_list is returned by get_csrs but not needed here
            db_entries, account_id_list = get_csrs(account_id=account_id, consent_id=consent_id, status_id=status_id)
        except IndexError as exp:
            error_title = "Consent Status Records not found with provided information"
            # FIX: comma was missing after the first placeholder in the detail text
            error_detail = "Account ID was {}, Service Link ID was {}, and Consent ID was {}. Status ID from query parameters was {}.".format(account_id, link_id, consent_id, status_id)
            logger.error(error_title + " - " + error_detail + ": " + repr(exp))
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        except Exception as exp:
            error_title = "No Consent Status Records found"
            error_detail = repr(exp)
            logger.error(error_title + " - " + error_detail)
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.info("Consent Status Records Fetched")

        # Response data container
        try:
            db_entry_list = db_entries
            response_data = {}
            response_data['data'] = db_entry_list
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + repr(response_data))

        create_event_log_entry(
            account_id=account_id,
            actor="AccountOwner",
            action="GET",
            resource=endpoint,
            timestamp=get_utc_time()
        )

        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + repr(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=200)
class ApiConsentStatusForServiceLinkRecord(Resource):
    @requires_api_auth_user
    @requires_api_auth_sdk
    def get(self, account_id, link_id, consent_id, status_id):
        """
        Fetch one Consent Status Record of a Consent Record that is related
        to a Service Link Record.

        :param account_id: ID of the Account
        :param link_id: ID of the Service Link Record
        :param consent_id: ID of the Consent Record
        :param status_id: ID of the Consent Status Record
        :return: JSON response containing the Consent Status Record
        :raises ApiError: 400 on bad path variables, 404 when a record is not
                          found, 500 on unexpected failures
        """
        # Resolve endpoint string (used as ApiError source and in log lines);
        # fall back to the module name if URL building fails.
        try:
            endpoint = str(api.url_for(self, account_id=account_id, link_id=link_id, consent_id=consent_id, status_id=status_id))
        except Exception as exp:
            endpoint = str(__name__)
        finally:
            logger.info("Request to: " + str(endpoint))
        logger.info("Fetching User API Key")
        api_key_user = get_user_api_key(endpoint=endpoint)
        logger.debug("api_key_user: " + api_key_user)
        logger.info("Fetching SDK API Key")
        api_key_sdk = get_sdk_api_key(endpoint=endpoint)
        logger.debug("api_key_sdk: " + api_key_sdk)
        # Check path variables
        try:
            account_id = str(account_id)
        except Exception as exp:
            error_title = "Unsupported account_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        try:
            link_id = str(link_id)
        except Exception as exp:
            error_title = "Unsupported link_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        try:
            consent_id = str(consent_id)
        except Exception as exp:
            error_title = "Unsupported consent_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        try:
            status_id = str(status_id)
        except Exception as exp:
            error_title = "Unsupported status_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        # Check if Account IDs from path and ApiKey are matching
        if verify_account_id_match(account_id=account_id, api_key=api_key_user, endpoint=endpoint):
            logger.info("Account IDs are matching")
        # Get Consent Record (verifies the consent belongs to this account/SLR)
        try:
            logger.info("Fetching Consent Record")
            cr, account_id_from_db = get_cr(cr_id=consent_id, slr_id=link_id, account_id=account_id)
        except IndexError as exp:
            error_title = "Consent Record not found with provided information"
            error_detail = "Account ID was {}, Service Link ID was {} and Consent ID was {}.".format(account_id, link_id, consent_id)
            logger.error(error_title + " - " + error_detail + ": " + repr(exp))
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        except Exception as exp:
            error_title = "No Consent Record found"
            error_detail = repr(exp)
            logger.error(error_title + " - " + error_detail)
            raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.info("Consent Record Fetched")
        # Get Consent Status Record
        # BUG FIX: log and error messages below used to say "Consent Record"
        # although a Consent Status Record is fetched here.
        try:
            logger.info("Fetching Consent Status Record")
            cr_entry, account_id_from_db = get_csr(cr_id=consent_id, account_id=account_id, csr_id=status_id)
        except IndexError as exp:
            error_title = "Consent Status Record not found with provided information"
            error_detail = "Account ID was {}, Service Link ID was {}, Consent ID was {} and Consent Status ID was {}.".format(account_id, link_id, consent_id, status_id)
            logger.error(error_title + " - " + error_detail + ": " + repr(exp))
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        except Exception as exp:
            error_title = "No Consent Status Record found"
            error_detail = repr(exp)
            logger.error(error_title + " - " + error_detail)
            raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.info("Consent Status Record Fetched")
        # Response data container
        try:
            response_data = {}
            response_data['data'] = cr_entry
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + repr(response_data))
        # Audit trail entry for this read operation
        create_event_log_entry(
            account_id=account_id,
            actor="AccountOwner",
            action="GET",
            resource=endpoint,
            timestamp=get_utc_time()
        )
        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + repr(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=200)
class ApiConsentsForAccount(Resource):
    @requires_api_auth_user
    @requires_api_auth_sdk
    def get(self, account_id):
        """
        Fetch list of Consent Records related to Account.

        BUG FIX: the docstring previously documented a ``link_id`` parameter
        that this endpoint does not take.

        :param account_id: ID of the Account
        :return: JSON array of Consent Records
        :raises ApiError: 400 on bad path variables, 404 when no records are
                          found, 500 if response preparation fails
        """
        # Resolve endpoint string (used as ApiError source and in log lines);
        # fall back to the module name if URL building fails.
        try:
            endpoint = str(api.url_for(self, account_id=account_id))
        except Exception as exp:
            endpoint = str(__name__)
        finally:
            logger.info("Request to: " + str(endpoint))
        logger.info("Fetching User API Key")
        api_key_user = get_user_api_key(endpoint=endpoint)
        logger.debug("api_key_user: " + api_key_user)
        logger.info("Fetching SDK API Key")
        api_key_sdk = get_sdk_api_key(endpoint=endpoint)
        logger.debug("api_key_sdk: " + api_key_sdk)
        # Check path variables
        try:
            account_id = str(account_id)
        except Exception as exp:
            error_title = "Unsupported account_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        # Check if Account IDs from path and ApiKey are matching
        if verify_account_id_match(account_id=account_id, api_key=api_key_user, endpoint=endpoint):
            logger.info("Account IDs are matching")
        # Get Consent Records
        try:
            logger.info("Fetching ConsentRecords")
            db_entries, account_id_list = get_crs(account_id=account_id)
        except IndexError as exp:
            error_title = "Consent Record not found with provided information"
            error_detail = "Account ID was {}".format(account_id)
            logger.error(error_title + " - " + error_detail + ": " + repr(exp))
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        except Exception as exp:
            error_title = "No ConsentRecords found"
            error_detail = repr(exp)
            logger.error(error_title + " - " + error_detail)
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.info("ConsentRecords Fetched")
            logger.debug("ConsentRecords: " + json.dumps(db_entries))
        # Response data container
        try:
            db_entry_list = db_entries
            response_data = {}
            response_data['data'] = db_entry_list
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + repr(response_data))
        # Audit trail entry for this read operation
        create_event_log_entry(
            account_id=account_id,
            actor="AccountOwner",
            action="GET",
            resource=endpoint,
            timestamp=get_utc_time()
        )
        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + repr(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=200)
class ApiConsentForAccount(Resource):
    @requires_api_auth_user
    @requires_api_auth_sdk
    def get(self, account_id, consent_id):
        """
        Fetch Consent Record related to Service Link Record.

        Query parameter ``get_consent_pair`` (boolean-ish string) additionally
        fetches the paired Consent Record of the consent pair.

        :param account_id: ID of the Account
        :param consent_id: ID of the Consent Record
        :return: JSON response whose data is an array of one (or two, with
                 ``get_consent_pair``) Consent Records
        :raises ApiError: 400 on bad input, 404 when a record is not found,
                          500 on unexpected failures
        """
        # Resolve endpoint string (used as ApiError source and in log lines);
        # fall back to the module name if URL building fails.
        try:
            endpoint = str(api.url_for(self, account_id=account_id, consent_id=consent_id))
        except Exception as exp:
            endpoint = str(__name__)
        finally:
            logger.info("Request to: " + str(endpoint))
        logger.info("Fetching User API Key")
        api_key_user = get_user_api_key(endpoint=endpoint)
        logger.debug("api_key_user: " + api_key_user)
        logger.info("Fetching SDK API Key")
        api_key_sdk = get_sdk_api_key(endpoint=endpoint)
        logger.debug("api_key_sdk: " + api_key_sdk)
        # Check path variables
        try:
            account_id = str(account_id)
        except Exception as exp:
            error_title = "Unsupported account_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        try:
            consent_id = str(consent_id)
        except Exception as exp:
            error_title = "Unsupported consent_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        # Check if Account IDs from path and ApiKey are matching
        if verify_account_id_match(account_id=account_id, api_key=api_key_user, endpoint=endpoint):
            logger.info("Account IDs are matching")
        # Check query variables
        try:
            # BUG FIX: previously only the exact string "True" enabled the
            # flag; accept it case-insensitively ("true", "TRUE", ...).
            # Anything else - including the absent-value default - is False.
            get_consent_pair = str(request.args.get('get_consent_pair', False)).lower() == "true"
        except Exception as exp:
            raise ApiError(code=400, title="Unsupported get_consent_pair", detail=repr(exp), source=endpoint)
        else:
            if get_consent_pair:
                logger.info("get_consent_pair from query params: True")
            else:
                logger.info("get_consent_pair from query params: False")
        # Get Consent Record
        cr_array = []
        try:
            logger.info("Fetching Consent Record")
            cr_entry, account_id_from_db = get_cr(cr_id=consent_id, account_id=account_id)
            cr_array.append(cr_entry)
        except IndexError as exp:
            error_title = "Consent Record not found with provided information"
            error_detail = "Account ID was {} and Consent ID was {}.".format(account_id, consent_id)
            logger.error(error_title + " - " + error_detail + ": " + repr(exp))
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        except Exception as exp:
            error_title = "No Consent Record found"
            error_detail = repr(exp)
            logger.error(error_title + " - " + error_detail)
            raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.info("Consent Record Fetched")
        # Get Consent Pair
        if get_consent_pair:
            try:
                logger.info("Fetching Consent Pair")
                cr_entry, account_id_from_db = get_cr(account_id=account_id, consent_pair_id=consent_id)
                cr_array.append(cr_entry)
            except IndexError as exp:
                error_title = "Consent Pair not found with provided information"
                error_detail = "Account ID was {} and Consent Pair ID was {}.".format(account_id, consent_id)
                logger.error(error_title + " - " + error_detail + ": " + repr(exp))
                raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
            except Exception as exp:
                error_title = "No Consent Pair found"
                error_detail = repr(exp)
                logger.error(error_title + " - " + error_detail)
                raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
            else:
                logger.info("Consent Pair Fetched")
        # Response data container
        try:
            response_data = {}
            response_data['data'] = cr_array
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + repr(response_data))
        # Audit trail entry for this read operation
        create_event_log_entry(
            account_id=account_id,
            actor="AccountOwner",
            action="GET",
            resource=endpoint,
            timestamp=get_utc_time()
        )
        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + repr(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=200)
class ApiConsentStatusesForAccount(Resource):
    @requires_api_auth_user
    @requires_api_auth_sdk
    def get(self, account_id, consent_id):
        """
        Fetch list of Consent Status Records

        :param account_id: ID of the Account
        :param consent_id: ID of the Consent Record whose status records are listed
        :return: JSON array of Consent Status Records
        """
        # Resolve endpoint string (used as ApiError source and in log lines);
        # fall back to the module name if URL building fails.
        try:
            endpoint = str(api.url_for(self, account_id=account_id, consent_id=consent_id))
        except Exception as exp:
            endpoint = str(__name__)
        finally:
            logger.info("Request to: " + str(endpoint))
        # Both keys are validated by the decorators above; fetched here for
        # the account-match check below.
        logger.info("Fetching User API Key")
        api_key_user = get_user_api_key(endpoint=endpoint)
        logger.debug("api_key_user: " + api_key_user)
        logger.info("Fetching SDK API Key")
        api_key_sdk = get_sdk_api_key(endpoint=endpoint)
        logger.debug("api_key_sdk: " + api_key_sdk)
        # Check path variables
        try:
            account_id = str(account_id)
        except Exception as exp:
            raise ApiError(code=400, title="Unsupported account_id", detail=repr(exp), source=endpoint)
        else:
            logger.debug("Account ID from path: " + account_id)
        try:
            consent_id = str(consent_id)
        except Exception as exp:
            error_title = "Unsupported consent_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.info("consent_id: " + consent_id)
        # Check if Account IDs from path and ApiKey are matching
        if verify_account_id_match(account_id=account_id, api_key=api_key_user, endpoint=endpoint):
            logger.info("Account IDs are matching")
        # Check query variables
        try:
            # Optional filter; empty string presumably means "no status
            # filtering" - TODO confirm against get_csrs().
            status_id = request.args.get('status_id', "")
            status_id = str(status_id)
        except Exception as exp:
            raise ApiError(code=400, title="Unsupported status_id", detail=repr(exp), source=endpoint)
        else:
            logger.info("status_id from query params: {}".format(status_id))
        # Get Consent Status Records
        try:
            logger.info("Fetching Consent Status Records")
            db_entries, account_id_from_db = get_csrs(account_id=account_id, consent_id=consent_id, status_id=status_id)
        except IndexError as exp:
            error_title = "Consent Status Records not found with provided information"
            error_detail = "Account ID was {} and Consent ID was {}. Status ID from query parameters was {}.".format(account_id, consent_id, status_id)
            logger.error(error_title + " - " + error_detail + ": " + repr(exp))
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        except Exception as exp:
            error_title = "No Consent Status Records found"
            error_detail = repr(exp)
            logger.error(error_title + " - " + error_detail)
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.info("Consent Status Records Fetched")
        # Response data container
        try:
            db_entry_list = db_entries
            response_data = {}
            response_data['data'] = db_entry_list
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + repr(response_data))
        # Audit trail entry for this read operation
        create_event_log_entry(
            account_id=account_id,
            actor="AccountOwner",
            action="GET",
            resource=endpoint,
            timestamp=get_utc_time()
        )
        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + repr(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=200)
class ApiConsentStatusForAccount(Resource):
    @requires_api_auth_user
    @requires_api_auth_sdk
    def get(self, account_id, consent_id, status_id):
        """
        Fetch Consent Status Record.

        :param account_id: ID of the Account
        :param consent_id: ID of the Consent Record
        :param status_id: ID of the Consent Status Record
        :return: JSON response containing the Consent Status Record
        :raises ApiError: 400 on bad path variables, 404 when the record is
                          not found, 500 on unexpected failures
        """
        # Resolve endpoint string (used as ApiError source and in log lines);
        # fall back to the module name if URL building fails.
        try:
            endpoint = str(api.url_for(self, account_id=account_id, consent_id=consent_id, status_id=status_id))
        except Exception as exp:
            endpoint = str(__name__)
        finally:
            logger.info("Request to: " + str(endpoint))
        logger.info("Fetching User API Key")
        api_key_user = get_user_api_key(endpoint=endpoint)
        logger.debug("api_key_user: " + api_key_user)
        logger.info("Fetching SDK API Key")
        api_key_sdk = get_sdk_api_key(endpoint=endpoint)
        logger.debug("api_key_sdk: " + api_key_sdk)
        # Check path variables
        try:
            account_id = str(account_id)
        except Exception as exp:
            error_title = "Unsupported account_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        try:
            consent_id = str(consent_id)
        except Exception as exp:
            error_title = "Unsupported consent_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        try:
            status_id = str(status_id)
        except Exception as exp:
            error_title = "Unsupported status_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        # Check if Account IDs from path and ApiKey are matching
        if verify_account_id_match(account_id=account_id, api_key=api_key_user, endpoint=endpoint):
            logger.info("Account IDs are matching")
        # Get Consent Status Record
        # BUG FIX: log and error messages below used to say "Consent Record"
        # although a Consent Status Record is fetched here.
        try:
            logger.info("Fetching Consent Status Record")
            cr_entry, account_id_from_db = get_csr(cr_id=consent_id, account_id=account_id, csr_id=status_id)
        except IndexError as exp:
            error_title = "Consent Status Record not found with provided information"
            error_detail = "Account ID was {}, Consent ID was {} and Consent Status ID was {}.".format(account_id, consent_id, status_id)
            logger.error(error_title + " - " + error_detail + ": " + repr(exp))
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        except Exception as exp:
            error_title = "No Consent Status Record found"
            error_detail = repr(exp)
            logger.error(error_title + " - " + error_detail)
            raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.info("Consent Status Record Fetched")
        # Response data container
        try:
            response_data = {}
            response_data['data'] = cr_entry
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + repr(response_data))
        # Audit trail entry for this read operation
        create_event_log_entry(
            account_id=account_id,
            actor="AccountOwner",
            action="GET",
            resource=endpoint,
            timestamp=get_utc_time()
        )
        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + repr(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=200)
class ApiAccountConsentStatusLast(Resource):
    @requires_api_auth_user
    @requires_api_auth_sdk
    def get(self, account_id, consent_id):
        """
        Fetch last Consent Status Record

        :param account_id: ID of the Account
        :param consent_id: ID of the Consent Record
        :return: JSON response containing the latest Consent Status Record
        """
        # Resolve endpoint string (used as ApiError source and in log lines);
        # fall back to the module name if URL building fails.
        try:
            endpoint = str(api.url_for(self, account_id=account_id, consent_id=consent_id))
        except Exception as exp:
            endpoint = str(__name__)
        finally:
            logger.info("Request to: " + str(endpoint))
        logger.info("Fetching User API Key")
        api_key_user = get_user_api_key(endpoint=endpoint)
        logger.debug("api_key_user: " + api_key_user)
        logger.info("Fetching SDK API Key")
        api_key_sdk = get_sdk_api_key(endpoint=endpoint)
        logger.debug("api_key_sdk: " + api_key_sdk)
        # Check path variables
        try:
            account_id = str(account_id)
        except Exception as exp:
            error_title = "Unsupported account_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        try:
            consent_id = str(consent_id)
        except Exception as exp:
            error_title = "Unsupported consent_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        # Check if Account IDs from path and ApiKey are matching
        if verify_account_id_match(account_id=account_id, api_key=api_key_user, endpoint=endpoint):
            logger.info("Account IDs are matching")
        # Get Consent Record (verifies the consent belongs to this account)
        try:
            logger.info("Fetching Consent Record")
            cr, account_id_from_db = get_cr(cr_id=consent_id, account_id=account_id)
        except IndexError as exp:
            error_title = "Consent Record not found with provided information"
            error_detail = "Account ID was {} and Consent ID was {}.".format(account_id, consent_id)
            logger.error(error_title + " - " + error_detail + ": " + repr(exp))
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        except Exception as exp:
            error_title = "No Consent Record found"
            error_detail = repr(exp)
            logger.error(error_title + " - " + error_detail)
            raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.info("Consent Record Fetched")
        # Get last Consent Status Record
        try:
            logger.info("Fetching last Consent Status Record")
            csr_dict = get_last_cr_status(consent_id=consent_id, account_id=account_id)
        except IndexError as exp:
            error_title = "Consent Status Record not found with provided information"
            error_detail = "Account ID was {} and Consent ID was {}".format(account_id, consent_id)
            logger.error(error_title + " - " + error_detail + ": " + repr(exp))
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        except Exception as exp:
            error_title = "No Consent Status Record found"
            error_detail = repr(exp)
            logger.error(error_title + " - " + error_detail)
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.info("Last Consent Status Record Fetched")
            logger.debug("Consent Status Record: " + json.dumps(csr_dict))
        # Response data container
        try:
            response_data = {}
            response_data['data'] = csr_dict
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + json.dumps(response_data))
        # Audit trail entry for this read operation
        create_event_log_entry(
            account_id=account_id,
            actor="AccountOwner",
            action="GET",
            resource=endpoint,
            timestamp=get_utc_time()
        )
        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + json.dumps(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=200)
class ApiAccountConsentStatusLastForServiceLinkRecord(Resource):
    @requires_api_auth_user
    @requires_api_auth_sdk
    def get(self, account_id, link_id, consent_id):
        """
        Fetch last Consent Status Record of a Consent Record that is related
        to a Service Link Record.

        :param account_id: ID of the Account
        :param link_id: ID of the Service Link Record
        :param consent_id: ID of the Consent Record
        :return: JSON response containing the latest Consent Status Record
        """
        # Resolve endpoint string (used as ApiError source and in log lines);
        # fall back to the module name if URL building fails.
        try:
            endpoint = str(api.url_for(self, account_id=account_id, link_id=link_id, consent_id=consent_id))
        except Exception as exp:
            endpoint = str(__name__)
        finally:
            logger.info("Request to: " + str(endpoint))
        logger.info("Fetching User API Key")
        api_key_user = get_user_api_key(endpoint=endpoint)
        logger.debug("api_key_user: " + api_key_user)
        logger.info("Fetching SDK API Key")
        api_key_sdk = get_sdk_api_key(endpoint=endpoint)
        logger.debug("api_key_sdk: " + api_key_sdk)
        # Check path variables
        try:
            account_id = str(account_id)
        except Exception as exp:
            error_title = "Unsupported account_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        try:
            link_id = str(link_id)
        except Exception as exp:
            error_title = "Unsupported link_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        try:
            consent_id = str(consent_id)
        except Exception as exp:
            error_title = "Unsupported consent_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        # Check if Account IDs from path and ApiKey are matching
        if verify_account_id_match(account_id=account_id, api_key=api_key_user, endpoint=endpoint):
            logger.info("Account IDs are matching")
        # Get Consent Record (verifies the consent belongs to this account/SLR)
        try:
            logger.info("Fetching Consent Record")
            cr, account_id_from_db = get_cr(cr_id=consent_id, slr_id=link_id, account_id=account_id)
        except IndexError as exp:
            error_title = "Consent Record not found with provided information"
            error_detail = "Account ID was {}, Service Link ID was {} and Consent ID was {}.".format(account_id, link_id, consent_id)
            logger.error(error_title + " - " + error_detail + ": " + repr(exp))
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        except Exception as exp:
            error_title = "No Consent Record found"
            error_detail = repr(exp)
            logger.error(error_title + " - " + error_detail)
            raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.info("Consent Record Fetched")
        # Get last Consent Status Record
        try:
            logger.info("Fetching last Consent Status Record")
            csr_dict = get_last_cr_status(consent_id=consent_id, account_id=account_id)
        except IndexError as exp:
            error_title = "Consent Status Record not found with provided information"
            error_detail = "Account ID was {} and Consent ID was {}".format(account_id, consent_id)
            logger.error(error_title + " - " + error_detail + ": " + repr(exp))
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        except Exception as exp:
            error_title = "No Consent Status Record found"
            error_detail = repr(exp)
            logger.error(error_title + " - " + error_detail)
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.info("Last Consent Status Record Fetched")
            logger.debug("Consent Status Record: " + json.dumps(csr_dict))
        # Response data container
        try:
            response_data = {}
            response_data['data'] = csr_dict
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + json.dumps(response_data))
        # Audit trail entry for this read operation
        create_event_log_entry(
            account_id=account_id,
            actor="AccountOwner",
            action="GET",
            resource=endpoint,
            timestamp=get_utc_time()
        )
        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + json.dumps(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=200)
######
# Service Section
######
class ApiConsentsForService(Resource):
    @requires_api_auth_sdk
    def get(self, service_id, link_id):
        """
        Fetch list of Consent Records related to Service
        :param service_id:
        :param link_id:
        :return: JSON array
        """
        # Resolve the endpoint string; it doubles as the ApiError source and
        # appears in every log line below.
        try:
            endpoint = str(api.url_for(self, service_id=service_id, link_id=link_id))
        except Exception:
            endpoint = str(__name__)
        finally:
            logger.info("Request to: " + str(endpoint))
        logger.info("Fetching SDK API Key")
        api_key_sdk = get_sdk_api_key(endpoint=endpoint)
        logger.debug("api_key_sdk: " + api_key_sdk)
        # Path variables must be coercible to strings
        try:
            service_id = str(service_id)
        except Exception as exc:
            logger.error("Unsupported service_id" + repr(exc))
            raise ApiError(code=400, title="Unsupported service_id", detail=repr(exc), source=endpoint)
        try:
            link_id = str(link_id)
        except Exception as exc:
            logger.error("Unsupported link_id" + repr(exc))
            raise ApiError(code=400, title="Unsupported link_id", detail=repr(exc), source=endpoint)
        # Fetch the Consent Records for this Service / Service Link pair
        try:
            logger.info("Fetching Consent Records")
            consent_records, account_ids = get_crs(subject_id=service_id, slr_id=link_id)
        except IndexError as exc:
            title = "Consent Record not found with provided information"
            detail = "Service ID was {} and Service Link ID was {}".format(service_id, link_id)
            logger.error(title + " - " + detail + ": " + repr(exc))
            raise ApiError(code=404, title=title, detail=detail, source=endpoint)
        except Exception as exc:
            title = "No Consent Records found"
            logger.error(title + " - " + repr(exc))
            raise ApiError(code=404, title=title, detail=repr(exc), source=endpoint)
        logger.info("Consent Records Fetched")
        logger.debug("Consent Records: " + json.dumps(consent_records))
        # Wrap the records into the response container
        try:
            response_data = {'data': consent_records}
        except Exception as exc:
            logger.error('Could not prepare response data: ' + repr(exc))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exc), source=endpoint)
        logger.info('Response data ready')
        logger.debug('response_data: ' + repr(response_data))
        # One audit-trail entry per Account the returned records belong to
        for account in account_ids:
            create_event_log_entry(
                account_id=account,
                actor="Operator",
                action="GET",
                resource=endpoint,
                timestamp=get_utc_time()
            )
        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + repr(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=200)
class ApiConsentForService(Resource):
    @requires_api_auth_sdk
    def get(self, service_id, link_id, consent_id):
        """
        Fetch Consent Record related to Service.

        :param service_id: ID of the Service
        :param link_id: ID of the Service Link Record
        :param consent_id: ID of the Consent Record
        :return: JSON response containing the Consent Record
        :raises ApiError: 400 on bad path variables, 404 when the record is
                          not found, 500 on unexpected failures
        """
        # Resolve endpoint string (used as ApiError source and in log lines);
        # fall back to the module name if URL building fails.
        try:
            endpoint = str(api.url_for(self, service_id=service_id, link_id=link_id, consent_id=consent_id))
        except Exception as exp:
            endpoint = str(__name__)
        finally:
            logger.info("Request to: " + str(endpoint))
        logger.info("Fetching SDK API Key")
        api_key_sdk = get_sdk_api_key(endpoint=endpoint)
        logger.debug("api_key_sdk: " + api_key_sdk)
        # Check path variables
        try:
            service_id = str(service_id)
        except Exception as exp:
            error_title = "Unsupported service_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        try:
            link_id = str(link_id)
        except Exception as exp:
            error_title = "Unsupported link_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        # BUG FIX: consent_id was validated twice (verbatim copy-paste);
        # a single validation is sufficient.
        try:
            consent_id = str(consent_id)
        except Exception as exp:
            error_title = "Unsupported consent_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        # Get Consent Record
        try:
            logger.info("Fetching Consent Record")
            cr_entry, account_id = get_cr(cr_id=consent_id, slr_id=link_id, subject_id=service_id)
        except IndexError as exp:
            error_title = "Consent Record not found with provided information"
            error_detail = "Service ID was {}, Service Link ID was {} and Consent ID was {}.".format(service_id, link_id, consent_id)
            logger.error(error_title + " - " + error_detail + ": " + repr(exp))
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        except Exception as exp:
            error_title = "No Consent Record found"
            error_detail = repr(exp)
            logger.error(error_title + " - " + error_detail)
            raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.info("Consent Record Fetched")
        # Response data container
        try:
            response_data = {}
            response_data['data'] = cr_entry
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + repr(response_data))
        # Audit trail entry, logged against the Account owning the record
        create_event_log_entry(
            account_id=account_id,
            actor="Operator",
            action="GET",
            resource=endpoint,
            timestamp=get_utc_time()
        )
        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + repr(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=200)
class ApiConsentStatusesForService(Resource):
    @requires_api_auth_sdk
    def get(self, service_id, link_id, consent_id):
        """
        Fetch list of Consent Status Records

        :param service_id: ID of the Service
        :param link_id: ID of the Service Link Record
        :param consent_id: ID of the Consent Record whose status records are listed
        :return: JSON array of Consent Status Records
        """
        # Resolve endpoint string (used as ApiError source and in log lines);
        # fall back to the module name if URL building fails.
        try:
            endpoint = str(api.url_for(self, service_id=service_id, link_id=link_id, consent_id=consent_id))
        except Exception as exp:
            endpoint = str(__name__)
        finally:
            logger.info("Request to: " + str(endpoint))
        logger.info("Fetching SDK API Key")
        api_key_sdk = get_sdk_api_key(endpoint=endpoint)
        logger.debug("api_key_sdk: " + api_key_sdk)
        # Check path variables
        try:
            service_id = str(service_id)
        except Exception as exp:
            error_title = "Unsupported service_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        try:
            link_id = str(link_id)
        except Exception as exp:
            error_title = "Unsupported link_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        try:
            consent_id = str(consent_id)
        except Exception as exp:
            error_title = "Unsupported consent_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        # Check query variables
        try:
            # Optional filter; empty string presumably means "no status
            # filtering" - TODO confirm against get_csrs().
            status_id = request.args.get('status_id', "")
            status_id = str(status_id)
        except Exception as exp:
            raise ApiError(code=400, title="Unsupported status_id", detail=repr(exp), source=endpoint)
        else:
            logger.info("status_id from query params: {}".format(status_id))
        # Get Consent Record (verifies the consent belongs to this service/SLR)
        try:
            logger.info("Fetching Consent Record")
            cr_entry, account_id_from_db = get_cr(cr_id=consent_id, slr_id=link_id, subject_id=service_id)
        except IndexError as exp:
            error_title = "Consent Record not found with provided information"
            error_detail = "Service ID was {}, Service Link ID was {} and Consent ID was {}.".format(service_id, link_id, consent_id)
            logger.error(error_title + " - " + error_detail + ": " + repr(exp))
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        except Exception as exp:
            error_title = "No Consent Record found"
            error_detail = repr(exp)
            logger.error(error_title + " - " + error_detail)
            raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.info("Consent Record Fetched")
        # Get Consent Status Records
        try:
            logger.info("Fetching Consent Status Records")
            # NOTE(review): account_id="" looks like it disables account
            # filtering here (service-side view) - confirm against get_csrs().
            db_entries, account_id_list = get_csrs(account_id="", consent_id=consent_id, status_id=status_id)
        except IndexError as exp:
            error_title = "Consent Status Records not found with provided information"
            error_detail = "Service ID was {}, Service Link ID was {} and Consent ID was {}. Status ID from query parameters was {}.".format(service_id, link_id, consent_id, status_id)
            logger.error(error_title + " - " + error_detail + ": " + repr(exp))
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        except Exception as exp:
            error_title = "No Consent Status Records found"
            error_detail = repr(exp)
            logger.error(error_title + " - " + error_detail)
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.info("Consent Status Records Fetched")
        # Response data container
        try:
            db_entry_list = db_entries
            response_data = {}
            response_data['data'] = db_entry_list
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + repr(response_data))
        # One audit-trail entry per Account the returned records belong to
        for account_id_entry in account_id_list:
            create_event_log_entry(
                account_id=account_id_entry,
                actor="Operator",
                action="GET",
                resource=endpoint,
                timestamp=get_utc_time()
            )
        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + repr(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=200)
class ApiConsentStatusForService(Resource):
    @requires_api_auth_sdk
    def get(self, service_id, link_id, consent_id, status_id):
        """
        Fetch a single Consent Status Record of a Service's Consent Record.

        :param service_id: ID of the Service (Consent Record subject)
        :param link_id: ID of the Service Link Record the Consent belongs to
        :param status_id: ID of the Consent Status Record to fetch
        :param consent_id: ID of the Consent Record
        :return: JSON response with the Consent Status Record under 'data'
        """
        # Resolve endpoint string for logging and ApiError 'source'; fall back to module name.
        try:
            endpoint = str(api.url_for(self, service_id=service_id, link_id=link_id, consent_id=consent_id, status_id=status_id))
        except Exception as exp:
            endpoint = str(__name__)
        finally:
            logger.info("Request to: " + str(endpoint))

        logger.info("Fetching SDK API Key")
        api_key_sdk = get_sdk_api_key(endpoint=endpoint)
        logger.debug("api_key_sdk: " + api_key_sdk)

        # Check path variables
        try:
            service_id = str(service_id)
        except Exception as exp:
            error_title = "Unsupported service_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        try:
            link_id = str(link_id)
        except Exception as exp:
            error_title = "Unsupported link_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        try:
            consent_id = str(consent_id)
        except Exception as exp:
            error_title = "Unsupported consent_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        try:
            status_id = str(status_id)
        except Exception as exp:
            error_title = "Unsupported status_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)

        # Get Consent Record (verifies the consent exists for this service/link pair)
        try:
            logger.info("Fetching Consent Record")
            cr_entry, account_id_from_db = get_cr(cr_id=consent_id, slr_id=link_id, subject_id=service_id)
        except IndexError as exp:
            error_title = "Consent Record not found with provided information"
            error_detail = "Service ID was {}, Service Link ID was {} and Consent ID was {}.".format(service_id, link_id, consent_id)
            logger.error(error_title + " - " + error_detail + ": " + repr(exp))
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        except Exception as exp:
            error_title = "No Consent Record found"
            error_detail = repr(exp)
            logger.error(error_title + " - " + error_detail)
            raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.info("Consent Record Fetched")

        # Get Consent Status Record
        # BUGFIX: the log lines and error titles below previously said
        # "Consent Record" (copy-paste from the fetch above), which made
        # failures here indistinguishable from Consent Record fetch failures.
        try:
            logger.info("Fetching Consent Status Record")
            csr_entry, account_id_from_db = get_csr(cr_id=consent_id, account_id="", csr_id=status_id)
        except IndexError as exp:
            error_title = "Consent Status Record not found with provided information"
            error_detail = "Consent ID was {} and Consent Status ID was {}.".format(consent_id, status_id)
            logger.error(error_title + " - " + error_detail + ": " + repr(exp))
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        except Exception as exp:
            error_title = "No Consent Status Record found"
            error_detail = repr(exp)
            logger.error(error_title + " - " + error_detail)
            raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.info("Consent Status Record Fetched")

        # Response data container — payload is the Consent Status Record
        # (previously held in the clobbered 'cr_entry' variable; renamed for clarity,
        # same value is returned).
        try:
            response_data = {}
            response_data['data'] = csr_entry
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + repr(response_data))

        # Audit trail: record this read against the consent owner's account.
        create_event_log_entry(
            account_id=account_id_from_db,
            actor="Operator",
            action="GET",
            resource=endpoint,
            timestamp=get_utc_time()
        )

        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + repr(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=200)
class ApiConsentStatusLastForService(Resource):
    # Operator SDK endpoint: fetch the newest Consent Status Record of a
    # Service's Consent Record.

    @requires_api_auth_sdk
    def get(self, service_id, link_id, consent_id):
        """
        Fetch last Consent Status Record

        :param service_id: ID of the Service (Consent Record subject)
        :param link_id: ID of the Service Link Record the Consent belongs to
        :param consent_id: ID of the Consent Record
        :return: JSON response with the last Consent Status Record under 'data'
        """
        # Resolve endpoint string for logging and ApiError 'source'; fall back to module name.
        try:
            endpoint = str(api.url_for(self, service_id=service_id, link_id=link_id, consent_id=consent_id))
        except Exception as exp:
            endpoint = str(__name__)
        finally:
            logger.info("Request to: " + str(endpoint))
        logger.info("Fetching SDK API Key")
        api_key_sdk = get_sdk_api_key(endpoint=endpoint)
        logger.debug("api_key_sdk: " + api_key_sdk)
        # Check path variables
        try:
            service_id = str(service_id)
        except Exception as exp:
            error_title = "Unsupported service_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        try:
            link_id = str(link_id)
        except Exception as exp:
            error_title = "Unsupported link_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        try:
            consent_id = str(consent_id)
        except Exception as exp:
            error_title = "Unsupported consent_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        # Get Consent Record (confirms the consent exists for this service/link pair
        # and yields the owning account id for the audit log below)
        try:
            logger.info("Fetching Consent Record")
            cr, account_id_from_db = get_cr(cr_id=consent_id, subject_id=service_id, slr_id=link_id)
        except IndexError as exp:
            error_title = "Consent Record not found with provided information"
            error_detail = "Service ID was {}, Service Link ID was {} and Consent ID was {}.".format(service_id, link_id, consent_id)
            logger.error(error_title + " - " + error_detail + ": " + repr(exp))
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        except Exception as exp:
            error_title = "No Consent Record found"
            error_detail = repr(exp)
            logger.error(error_title + " - " + error_detail)
            raise ApiError(code=500, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.info("Consent Record Fetched")
        # Get last Consent Status Record
        try:
            logger.info("Fetching last Consent Status Record")
            csr_dict = get_last_cr_status(consent_id=consent_id)
        except IndexError as exp:
            error_title = "Consent Status Record not found with provided information"
            error_detail = "Consent ID was {}".format(consent_id)
            logger.error(error_title + " - " + error_detail + ": " + repr(exp))
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        except Exception as exp:
            # NOTE(review): generic failures map to 404 here, while the sibling
            # endpoints above use 500 for unexpected errors — confirm intended.
            error_title = "No Consent Status Record found"
            error_detail = repr(exp)
            logger.error(error_title + " - " + error_detail)
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.info("Last Consent Status Record Fetched")
            logger.debug("Consent Status Record: " + json.dumps(csr_dict))
        # Response data container
        try:
            response_data = {}
            response_data['data'] = csr_dict
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + json.dumps(response_data))
        # Audit trail: record this read against the consent owner's account.
        create_event_log_entry(
            account_id=account_id_from_db,
            actor="Operator",
            action="GET",
            resource=endpoint,
            timestamp=get_utc_time()
        )
        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + json.dumps(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=200)
##
# Authorisation token
class AuthorisationTokenData(Resource):
    # Operator SDK endpoint: collect the Source Service's Consent Record and
    # the Sink Service's Service Link Record needed to build an Authorisation token.

    @requires_api_auth_sdk
    def get(self, consent_id):
        """
        Fetch data needed to construct Authorisation token.

        :param consent_id: ID of the Sink Service's Consent Record
        :return: JSON response with 'consent_record' (Source Service) and
                 'service_link_record' (Sink Service) under 'data'
        """
        # Resolve endpoint string for logging and ApiError 'source'; fall back to module name.
        try:
            endpoint = str(api.url_for(self, consent_id=consent_id))
        except Exception as exp:
            endpoint = str(__name__)
        finally:
            logger.info("Request to: " + str(endpoint))
        logger.info("Fetching SDK API Key")
        api_key_sdk = get_sdk_api_key(endpoint=endpoint)
        logger.debug("api_key_sdk: " + api_key_sdk)
        # Check path variables
        try:
            consent_id = str(consent_id)
        except Exception as exp:
            error_title = "Unsupported consent_id"
            logger.error(error_title + repr(exp))
            raise ApiError(code=400, title=error_title, detail=repr(exp), source=endpoint)
        # Init Sink's Consent Record Object (not fetched from DB here; used as
        # the lookup key object for get_auth_token_data below)
        try:
            sink_cr_entry = ConsentRecord(consent_id=consent_id, role="Sink")
        except Exception as exp:
            error_title = "Failed to create Sink's Consent Record object"
            logger.error(error_title + ": " + repr(exp))
            raise ApiError(code=500, title=error_title, detail=repr(exp), source=endpoint)
        else:
            logger.debug("sink_cr_entry: " + sink_cr_entry.log_entry)
        # Collect the Source CR and Sink SLR that make up the token data.
        try:
            source_cr, sink_slr = get_auth_token_data(sink_cr_object=sink_cr_entry)
        except IndexError as exp:
            error_title = "Required data for Authorisation token could not be collected with provided information"
            error_detail = "Consent ID was {}".format(consent_id)
            logger.error(error_title + " - " + error_detail + ": " + repr(exp))
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        except Exception as exp:
            # NOTE(review): generic failures map to 404 here, while similar
            # endpoints use 500 for unexpected errors — confirm intended.
            error_title = "Could not get data for Authorisation token"
            error_detail = repr(exp)
            logger.error(error_title + " - " + error_detail)
            raise ApiError(code=404, title=error_title, detail=error_detail, source=endpoint)
        else:
            logger.info("Authorisation token data Fetched")
            logger.debug("Source Service's Consent Record: " + source_cr.log_entry)
            logger.debug("Sink Service's Service Link Record: " + sink_slr.log_entry)
        # Response data container
        try:
            response_data = {}
            response_data['data'] = {}
            response_data['data']['consent_record'] = source_cr.to_api_dict
            response_data['data']['service_link_record'] = sink_slr.to_api_dict
        except Exception as exp:
            logger.error('Could not prepare response data: ' + repr(exp))
            raise ApiError(code=500, title="Could not prepare response data", detail=repr(exp), source=endpoint)
        else:
            logger.info('Response data ready')
            logger.debug('response_data: ' + json.dumps(response_data))
        # Audit trail: record this read against the Source CR's owning account.
        create_event_log_entry(
            account_id=source_cr.accounts_id,
            actor="Operator",
            action="GET",
            resource=endpoint,
            timestamp=get_utc_time()
        )
        response_data_dict = dict(response_data)
        logger.debug('response_data_dict: ' + json.dumps(response_data_dict))
        return make_json_response(data=response_data_dict, status_code=200)
# Register resources
# --- Account-centric Authorisation (Consent) endpoints -----------------------
# Each resource is registered with and without a trailing slash.
# NOTE(review): several parameterised routes (e.g. '/consents/<consent_id>')
# coexist with static '.../last' routes — confirm werkzeug routing resolves
# 'last' to the intended endpoint rather than matching <string:consent_id>.
api.add_resource(
    ApiAccountConsent,
    '/accounts/<string:account_id>/servicelinks/<string:source_link_id>/<string:sink_link_id>/consents',
    '/accounts/<string:account_id>/servicelinks/<string:source_link_id>/<string:sink_link_id>/consents/',
    endpoint='authorisation_give_consent'
)
api.add_resource(
    ApiAccountConsentStatus,
    '/accounts/<string:account_id>/consents/<string:consent_id>/statuses',
    '/accounts/<string:account_id>/consents/<string:consent_id>/statuses/',
    endpoint='authorisation_consent_status'
)
api.add_resource(
    ApiAccountConsentStatusSigned,
    '/accounts/<string:account_id>/consents/<string:consent_id>/statuses/signed',
    '/accounts/<string:account_id>/consents/<string:consent_id>/statuses/signed/',
    endpoint='authorisation_consent_status_signed'
)
api.add_resource(
    ApiConsentsForServiceLinkRecord,
    '/accounts/<string:account_id>/servicelinks/<string:link_id>/consents',
    '/accounts/<string:account_id>/servicelinks/<string:link_id>/consents/',
    endpoint='authorisation_account_link_consents'
)
api.add_resource(
    ApiConsentForServiceLinkRecord,
    '/accounts/<string:account_id>/servicelinks/<string:link_id>/consents/<string:consent_id>',
    '/accounts/<string:account_id>/servicelinks/<string:link_id>/consents/<string:consent_id>/',
    endpoint='authorisation_account_link_consent'
)
api.add_resource(
    ApiLastConsentForServiceLinkRecord,
    '/accounts/<string:account_id>/servicelinks/<string:link_id>/consents/last',
    '/accounts/<string:account_id>/servicelinks/<string:link_id>/consents/last/',
    endpoint='authorisation_account_link_last_consent'
)
api.add_resource(
    ApiConsentStatusesForServiceLinkRecord,
    '/accounts/<string:account_id>/servicelinks/<string:link_id>/consents/<string:consent_id>/statuses',
    '/accounts/<string:account_id>/servicelinks/<string:link_id>/consents/<string:consent_id>/statuses/',
    endpoint='authorisation_account_link_consent_statuses'
)
api.add_resource(
    ApiConsentStatusForServiceLinkRecord,
    '/accounts/<string:account_id>/servicelinks/<string:link_id>/consents/<string:consent_id>/statuses/<string:status_id>',
    '/accounts/<string:account_id>/servicelinks/<string:link_id>/consents/<string:consent_id>/statuses/<string:status_id>/',
    endpoint='authorisation_account_link_consent_status'
)
api.add_resource(
    ApiAccountConsentStatusLastForServiceLinkRecord,
    '/accounts/<string:account_id>/servicelinks/<string:link_id>/consents/<string:consent_id>/statuses/last',
    '/accounts/<string:account_id>/servicelinks/<string:link_id>/consents/<string:consent_id>/statuses/last/',
    endpoint='authorisation_account_link_consent_status_last'
)
api.add_resource(
    ApiConsentsForAccount,
    '/accounts/<string:account_id>/consents',
    '/accounts/<string:account_id>/consents/',
    endpoint='authorisation_account_consents'
)
api.add_resource(
    ApiConsentForAccount,
    '/accounts/<string:account_id>/consents/<string:consent_id>',
    '/accounts/<string:account_id>/consents/<string:consent_id>/',
    endpoint='authorisation_account_consent'
)
api.add_resource(
    ApiConsentStatusesForAccount,
    '/accounts/<string:account_id>/consents/<string:consent_id>/statuses',
    '/accounts/<string:account_id>/consents/<string:consent_id>/statuses/',
    endpoint='authorisation_account_consent_statuses'
)
api.add_resource(
    ApiConsentStatusForAccount,
    '/accounts/<string:account_id>/consents/<string:consent_id>/statuses/<string:status_id>',
    '/accounts/<string:account_id>/consents/<string:consent_id>/statuses/<string:status_id>/',
    endpoint='authorisation_account_consent_status'
)
api.add_resource(
    ApiAccountConsentStatusLast,
    '/accounts/<string:account_id>/consents/<string:consent_id>/statuses/last',
    '/accounts/<string:account_id>/consents/<string:consent_id>/statuses/last/',
    endpoint='authorisation_account_consent_status_last'
)
## For Service
# --- Service-centric Authorisation (Consent) endpoints -----------------------
api.add_resource(
    ApiConsentsForService,
    '/services/<string:service_id>/servicelinks/<string:link_id>/consents',
    '/services/<string:service_id>/servicelinks/<string:link_id>/consents/',
    endpoint='authorisation_service_consents'
)
api.add_resource(
    ApiConsentForService,
    '/services/<string:service_id>/servicelinks/<string:link_id>/consents/<string:consent_id>',
    '/services/<string:service_id>/servicelinks/<string:link_id>/consents/<string:consent_id>/',
    endpoint='authorisation_service_consent'
)
api.add_resource(
    ApiConsentStatusesForService,
    '/services/<string:service_id>/servicelinks/<string:link_id>/consents/<string:consent_id>/statuses',
    '/services/<string:service_id>/servicelinks/<string:link_id>/consents/<string:consent_id>/statuses/',
    endpoint='authorisation_service_consent_statuses'
)
api.add_resource(
    ApiConsentStatusForService,
    '/services/<string:service_id>/servicelinks/<string:link_id>/consents/<string:consent_id>/statuses/<string:status_id>',
    '/services/<string:service_id>/servicelinks/<string:link_id>/consents/<string:consent_id>/statuses/<string:status_id>/',
    endpoint='authorisation_service_consent_status'
)
api.add_resource(
    ApiConsentStatusLastForService,
    '/services/<string:service_id>/servicelinks/<string:link_id>/consents/<string:consent_id>/statuses/last',
    '/services/<string:service_id>/servicelinks/<string:link_id>/consents/<string:consent_id>/statuses/last/',
    endpoint='authorisation_service_consent_status_last'
)
# --- Authorisation token ------------------------------------------------------
# NOTE(review): unlike the routes above, only the trailing-slash variant is
# registered here — confirm whether the slashless URL should also resolve.
api.add_resource(
    AuthorisationTokenData,
    '/consents/<string:consent_id>/authorisationtoken/',
    endpoint='authorisation-token'
)
| 44.84029
| 184
| 0.636629
| 14,844
| 123,535
| 5.034694
| 0.017785
| 0.0566
| 0.039125
| 0.038804
| 0.938449
| 0.901733
| 0.884686
| 0.870971
| 0.857015
| 0.835526
| 0
| 0.006517
| 0.272158
| 123,535
| 2,754
| 185
| 44.856572
| 0.824666
| 0.044635
| 0
| 0.805326
| 0
| 0.00551
| 0.199926
| 0.037287
| 0
| 0
| 0
| 0
| 0
| 1
| 0.009183
| false
| 0
| 0.004132
| 0
| 0.03168
| 0.000918
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8815a5f2290988f0f3bece29932f81cb57fb371a
| 11,658
|
py
|
Python
|
decora_wifi/models/app.py
|
balloob/python-decora_wifi
|
47900ad67002f3655fc4c799518bc4e73293ceb4
|
[
"MIT"
] | 33
|
2017-09-02T16:37:15.000Z
|
2021-12-28T15:24:39.000Z
|
decora_wifi/models/app.py
|
balloob/python-decora_wifi
|
47900ad67002f3655fc4c799518bc4e73293ceb4
|
[
"MIT"
] | 17
|
2017-09-12T04:53:07.000Z
|
2022-01-25T03:31:45.000Z
|
decora_wifi/models/app.py
|
balloob/python-decora_wifi
|
47900ad67002f3655fc4c799518bc4e73293ceb4
|
[
"MIT"
] | 21
|
2018-01-29T22:50:06.000Z
|
2022-01-06T02:30:47.000Z
|
# Leviton Cloud Services API model App.
# Auto-generated by api_scraper.py.
#
# Copyright 2017 Tim Lyakhovetskiy <tlyakhov@gmail.com>
#
# This code is released under the terms of the MIT license. See the LICENSE
# file for more details.
from decora_wifi.base_model import BaseModel
class App(BaseModel):
    """Leviton Cloud Services API model "App".

    Auto-generated REST wrapper (by api_scraper.py): every method builds an
    ``/Apps/...`` path and delegates to the session's
    ``call_api(path, attribs, http_verb)``.  ``@classmethod``s operate on the
    collection via an explicit ``session``; instance methods operate on the
    record identified by ``self._id`` via ``self._session``.
    ``attribs`` is the request payload/query dict; ``None`` defaults to ``{}``.
    """

    def __init__(self, session, model_id=None):
        # BaseModel stores the session and id; no extra state here.
        super(App, self).__init__(session, model_id)

    # ---- count endpoints -----------------------------------------------

    @classmethod
    def count(cls, session, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/count"
        return session.call_api(api, attribs, 'get')

    def count_organizations(self, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/organizations/count".format(self._id)
        return self._session.call_api(api, attribs, 'get')

    def count_preferences(self, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/preferences/count".format(self._id)
        return self._session.call_api(api, attribs, 'get')

    def count_subscription_plans(self, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/subscriptionPlans/count".format(self._id)
        return self._session.call_api(api, attribs, 'get')

    def count_updates(self, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/updates/count".format(self._id)
        return self._session.call_api(api, attribs, 'get')

    # ---- create endpoints ----------------------------------------------

    @classmethod
    def create(cls, session, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps"
        return session.call_api(api, attribs, 'post')

    @classmethod
    def create_many(cls, session, attribs=None):
        # NOTE(review): identical to create() — both POST to "/Apps"; the
        # distinction (if any) lives in the attribs payload.  Auto-generated.
        if attribs is None:
            attribs = {}
        api = "/Apps"
        return session.call_api(api, attribs, 'post')

    def create_organizations(self, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/organizations".format(self._id)
        return self._session.call_api(api, attribs, 'post')

    def create_preferences(self, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/preferences".format(self._id)
        return self._session.call_api(api, attribs, 'post')

    def create_subscription_plans(self, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/subscriptionPlans".format(self._id)
        return self._session.call_api(api, attribs, 'post')

    def create_updates(self, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/updates".format(self._id)
        return self._session.call_api(api, attribs, 'post')

    # ---- delete endpoints ----------------------------------------------

    def delete_by_id(self, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}".format(self._id)
        return self._session.call_api(api, attribs, 'delete')

    def delete_organizations(self, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/organizations".format(self._id)
        return self._session.call_api(api, attribs, 'delete')

    def delete_preferences(self, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/preferences".format(self._id)
        return self._session.call_api(api, attribs, 'delete')

    def delete_subscription_plans(self, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/subscriptionPlans".format(self._id)
        return self._session.call_api(api, attribs, 'delete')

    def delete_updates(self, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/updates".format(self._id)
        return self._session.call_api(api, attribs, 'delete')

    def destroy_by_id_organizations(self, organization_id, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/organizations/{1}".format(self._id, organization_id)
        return self._session.call_api(api, attribs, 'delete')

    def destroy_by_id_preferences(self, preference_id, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/preferences/{1}".format(self._id, preference_id)
        return self._session.call_api(api, attribs, 'delete')

    def destroy_by_id_subscription_plans(self, subscription_plan_id, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/subscriptionPlans/{1}".format(self._id, subscription_plan_id)
        return self._session.call_api(api, attribs, 'delete')

    def destroy_by_id_updates(self, update_id, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/updates/{1}".format(self._id, update_id)
        return self._session.call_api(api, attribs, 'delete')

    # ---- read endpoints ------------------------------------------------

    def exists(self, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/exists".format(self._id)
        return self._session.call_api(api, attribs, 'get')

    @classmethod
    def find(cls, session, attribs=None):
        # Returns a list of App models built from the raw response items.
        if attribs is None:
            attribs = {}
        api = "/Apps"
        items = session.call_api(api, attribs, 'get')
        result = []
        if items is not None:
            for data in items:
                model = App(session, data['id'])
                model.data = data
                result.append(model)
        return result

    def find_by_id(self, attribs=None):
        # Refreshes this instance's data from the server and returns self.
        # NOTE(review): unlike refresh(), no None-check before data.update —
        # a None response would raise here; confirm call_api's contract.
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}".format(self._id)
        data = self._session.call_api(api, attribs, 'get')
        self.data.update(data)
        return self

    def find_by_id_organizations(self, organization_id, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/organizations/{1}".format(self._id, organization_id)
        data = self._session.call_api(api, attribs, 'get')
        # Deferred import avoids a circular dependency between model modules.
        from .organization import Organization
        model = Organization(self._session, data['id'])
        model.data = data
        return model

    def find_by_id_preferences(self, preference_id, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/preferences/{1}".format(self._id, preference_id)
        data = self._session.call_api(api, attribs, 'get')
        from .preference import Preference
        model = Preference(self._session, data['id'])
        model.data = data
        return model

    def find_by_id_subscription_plans(self, subscription_plan_id, attribs=None):
        # NOTE(review): returns the raw response, not a model wrapper, unlike
        # the organizations/preferences counterparts.  Auto-generated asymmetry.
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/subscriptionPlans/{1}".format(self._id, subscription_plan_id)
        return self._session.call_api(api, attribs, 'get')

    def find_by_id_updates(self, update_id, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/updates/{1}".format(self._id, update_id)
        return self._session.call_api(api, attribs, 'get')

    @classmethod
    def find_one(cls, session, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/findOne"
        return session.call_api(api, attribs, 'get')

    def refresh(self):
        # Re-fetch this record; tolerates a None response (leaves data as-is).
        api = "/Apps/{0}".format(self._id)
        result = self._session.call_api(api, {}, 'get')
        if result is not None:
            self.data.update(result)
        return self

    def get_organizations(self, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/organizations".format(self._id)
        items = self._session.call_api(api, attribs, 'get')
        from .organization import Organization
        result = []
        if items is not None:
            for data in items:
                model = Organization(self._session, data['id'])
                model.data = data
                result.append(model)
        return result

    def get_preferences(self, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/preferences".format(self._id)
        items = self._session.call_api(api, attribs, 'get')
        from .preference import Preference
        result = []
        if items is not None:
            for data in items:
                model = Preference(self._session, data['id'])
                model.data = data
                result.append(model)
        return result

    def get_subscription_plans(self, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/subscriptionPlans".format(self._id)
        return self._session.call_api(api, attribs, 'get')

    def get_updates(self, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/updates".format(self._id)
        return self._session.call_api(api, attribs, 'get')

    # ---- update/replace endpoints --------------------------------------

    def replace_by_id(self, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/replace".format(self._id)
        return self._session.call_api(api, attribs, 'post')

    @classmethod
    def replace_or_create(cls, session, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/replaceOrCreate"
        return session.call_api(api, attribs, 'post')

    @classmethod
    def update_all(cls, session, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/update"
        return session.call_api(api, attribs, 'post')

    def update_attributes(self, attribs=None):
        # NOTE(review): the server response ('data') is discarded; the local
        # cache is merged with the *submitted* attribs instead — so any
        # server-side normalisation is not reflected.  Confirm intended.
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}".format(self._id)
        data = self._session.call_api(api, attribs, 'put')
        self.data.update(attribs)
        return self

    def update_by_id_organizations(self, organization_id, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/organizations/{1}".format(self._id, organization_id)
        data = self._session.call_api(api, attribs, 'put')
        from .organization import Organization
        model = Organization(self._session, data['id'])
        model.data = data
        return model

    def update_by_id_preferences(self, preference_id, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/preferences/{1}".format(self._id, preference_id)
        data = self._session.call_api(api, attribs, 'put')
        from .preference import Preference
        model = Preference(self._session, data['id'])
        model.data = data
        return model

    def update_by_id_subscription_plans(self, subscription_plan_id, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/subscriptionPlans/{1}".format(self._id, subscription_plan_id)
        return self._session.call_api(api, attribs, 'put')

    def update_by_id_updates(self, update_id, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/{0}/updates/{1}".format(self._id, update_id)
        return self._session.call_api(api, attribs, 'put')

    @classmethod
    def upsert(cls, session, attribs=None):
        # Returns an App model built from the response.
        if attribs is None:
            attribs = {}
        api = "/Apps"
        data = session.call_api(api, attribs, 'put')
        model = App(session, data['id'])
        model.data = data
        return model

    @classmethod
    def upsert_with_where(cls, session, attribs=None):
        if attribs is None:
            attribs = {}
        api = "/Apps/upsertWithWhere"
        return session.call_api(api, attribs, 'post')
| 34.389381
| 86
| 0.598559
| 1,408
| 11,658
| 4.790483
| 0.067472
| 0.044626
| 0.089251
| 0.108377
| 0.90126
| 0.898147
| 0.887176
| 0.882135
| 0.854707
| 0.840474
| 0
| 0.005955
| 0.279722
| 11,658
| 338
| 87
| 34.491124
| 0.797309
| 0.019043
| 0
| 0.763636
| 1
| 0
| 0.09075
| 0.059421
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16
| false
| 0
| 0.025455
| 0
| 0.345455
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ee00ff0d3a1853116d339444a46027ed144a160a
| 189
|
py
|
Python
|
drosoph_vae/models/__init__.py
|
samuelsmal/drosophVAE
|
4b1887e55a5eed1d26c07b6c43de59ffab5fc7c7
|
[
"MIT"
] | null | null | null |
drosoph_vae/models/__init__.py
|
samuelsmal/drosophVAE
|
4b1887e55a5eed1d26c07b6c43de59ffab5fc7c7
|
[
"MIT"
] | null | null | null |
drosoph_vae/models/__init__.py
|
samuelsmal/drosophVAE
|
4b1887e55a5eed1d26c07b6c43de59ffab5fc7c7
|
[
"MIT"
] | null | null | null |
from drosoph_vae.models.drosoph_vae import DrosophVAE
from drosoph_vae.models.drosoph_vae_conv import DrosophVAEConv
from drosoph_vae.models.drosoph_vae_skip_conv import DrosophVAESkipConv
| 47.25
| 71
| 0.904762
| 27
| 189
| 6
| 0.37037
| 0.37037
| 0.259259
| 0.37037
| 0.555556
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 0
| 0.063492
| 189
| 3
| 72
| 63
| 0.915254
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
ee5506aa5cb3a711bd0bc045e3564ce1cc11f342
| 5,683
|
py
|
Python
|
tests/api/utils/schema/analytics.py
|
satroutr/poppy
|
27417f86854d9e0a04726acc263ef0a2ce9f8f6e
|
[
"Apache-2.0"
] | 3
|
2017-07-05T20:09:59.000Z
|
2018-11-27T22:02:57.000Z
|
tests/api/utils/schema/analytics.py
|
satroutr/poppy
|
27417f86854d9e0a04726acc263ef0a2ce9f8f6e
|
[
"Apache-2.0"
] | 24
|
2017-04-18T15:14:04.000Z
|
2019-03-20T19:09:07.000Z
|
tests/api/utils/schema/analytics.py
|
satroutr/poppy
|
27417f86854d9e0a04726acc263ef0a2ce9f8f6e
|
[
"Apache-2.0"
] | 8
|
2017-04-03T13:24:27.000Z
|
2021-11-08T20:28:10.000Z
|
# Copyright (c) 2016 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# JSON schemas for analytics API responses.  All six metrics share the exact
# same shape — a 'domain' URI, a per-region time-series object keyed by the
# metric name, and fixed 'flavor'/'provider' enums — so the previously
# hand-copied literals are now generated from a single factory.  The module's
# public names and their values are unchanged.

# Regions reported by the analytics backend (keys of the per-metric object).
_ANALYTICS_REGIONS = (
    'India', 'EMEA', 'APAC', 'North America', 'South America', 'Japan',
)


def _analytics_schema(metric_name):
    """Build the response schema for one analytics metric.

    :param metric_name: name of the metric property, e.g. 'requestCount'
    :return: a JSON-schema dict structurally identical to the previous
             hand-written literal for that metric
    """
    return {
        'type': 'object',
        'properties': {
            'domain': {'type': 'string', 'format': 'uri'},
            metric_name: {
                'type': 'object',
                'properties': {
                    # Fresh dict per region so the schemas stay independently
                    # mutable, matching the original distinct literals.
                    region: {'type': 'array', 'items': {'type': 'array'}}
                    for region in _ANALYTICS_REGIONS
                },
            },
            'flavor': {'type': 'string', 'enum': ['cdn']},
            'provider': {'type': 'string', 'enum': ['akamai']},
        },
    }


get_request_count = _analytics_schema('requestCount')
get_bandwidthOut = _analytics_schema('bandwidthOut')
get_httpResponseCode_2XX = _analytics_schema('httpResponseCode_2XX')
get_httpResponseCode_3XX = _analytics_schema('httpResponseCode_3XX')
get_httpResponseCode_4XX = _analytics_schema('httpResponseCode_4XX')
get_httpResponseCode_5XX = _analytics_schema('httpResponseCode_5XX')
| 40.304965
| 72
| 0.431286
| 472
| 5,683
| 5.161017
| 0.201271
| 0.26601
| 0.206897
| 0.26601
| 0.797619
| 0.787767
| 0.787767
| 0.787767
| 0.750821
| 0.650657
| 0
| 0.004209
| 0.331163
| 5,683
| 140
| 73
| 40.592857
| 0.636675
| 0.09766
| 0
| 0.8
| 0
| 0
| 0.370037
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
ee899548a1b84acae0039602bbb116ac8072d89e
| 1,283
|
py
|
Python
|
Binary Search/875. Koko Eating Bananas.py
|
beckswu/Leetcode
|
480e8dc276b1f65961166d66efa5497d7ff0bdfd
|
[
"MIT"
] | 138
|
2020-02-08T05:25:26.000Z
|
2021-11-04T11:59:28.000Z
|
Binary Search/875. Koko Eating Bananas.py
|
beckswu/Leetcode
|
480e8dc276b1f65961166d66efa5497d7ff0bdfd
|
[
"MIT"
] | null | null | null |
Binary Search/875. Koko Eating Bananas.py
|
beckswu/Leetcode
|
480e8dc276b1f65961166d66efa5497d7ff0bdfd
|
[
"MIT"
] | 24
|
2021-01-02T07:18:43.000Z
|
2022-03-20T08:17:54.000Z
|
class Solution:
    def minEatingSpeed(self, piles: List[int], h: int) -> int:
        """Return the minimum integer eating speed that finishes every
        banana pile within h hours (LeetCode 875)."""
        def feasible(speed):
            # Hours needed at this speed: ceil(p / speed) per pile.
            hours = sum((p - 1) // speed + 1 for p in piles)
            return hours <= h
        # Binary-search the smallest feasible speed in [1, max pile].
        lo, hi = 1, max(piles)
        while lo < hi:
            mid = (lo + hi) // 2
            if feasible(mid):
                hi = mid
            else:
                lo = mid + 1
        return lo
class Solution:
    def minEatingSpeed(self, piles: List[int], h: int) -> int:
        """Binary-search the smallest speed whose total eating time
        does not exceed h hours (LeetCode 875)."""
        low = 1
        high = max(piles)
        while low < high:
            mid = low + (high - low) // 2
            # Total hours at speed mid; -(-p // mid) is ceil(p / mid).
            needed = 0
            for p in piles:
                needed += -(-p // mid)
            if needed > h:
                low = mid + 1
            else:
                high = mid
        return low
class Solution:
    def minEatingSpeed(self, piles: List[int], h: int) -> int:
        """Find the least speed k so all piles are eaten within h hours
        (LeetCode 875); a pile smaller than k still costs one full hour."""
        def within_limit(speed):
            total = 0
            for bananas in piles:
                if bananas <= speed:
                    # A whole hour is spent even on a small pile.
                    total += 1
                else:
                    # Whole hours plus one extra hour for any remainder.
                    extra = 0 if bananas % speed == 0 else 1
                    total += bananas // speed + extra
            return total <= h
        lo, hi = 1, max(piles)
        while lo < hi:
            mid = (lo + hi) // 2
            if within_limit(mid):
                hi = mid
            else:
                lo = mid + 1
        return lo
| 26.183673
| 62
| 0.342167
| 155
| 1,283
| 2.832258
| 0.180645
| 0.031891
| 0.109339
| 0.205011
| 0.822323
| 0.788155
| 0.788155
| 0.788155
| 0.788155
| 0.788155
| 0
| 0.031142
| 0.549493
| 1,283
| 49
| 63
| 26.183673
| 0.728374
| 0
| 0
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0
| 0
| 0.288889
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
a0076ae00e8829a9db3e282b210ec8ef4da2b341
| 98
|
py
|
Python
|
models/__init__.py
|
ZenithClown/DQNProjects
|
3a9f022166022509fce0f4306ed5612854d5539f
|
[
"Apache-2.0"
] | null | null | null |
models/__init__.py
|
ZenithClown/DQNProjects
|
3a9f022166022509fce0f4306ed5612854d5539f
|
[
"Apache-2.0"
] | null | null | null |
models/__init__.py
|
ZenithClown/DQNProjects
|
3a9f022166022509fce0f4306ed5612854d5539f
|
[
"Apache-2.0"
] | null | null | null |
# -*- encoding: utf-8 -*-
from .linear import * # noqa: F403
from .trainer import * # noqa: F403
| 19.6
| 35
| 0.622449
| 13
| 98
| 4.692308
| 0.692308
| 0.327869
| 0.459016
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089744
| 0.204082
| 98
| 4
| 36
| 24.5
| 0.692308
| 0.459184
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4e548632bbb90189b06d8f7cdd841f8da639a602
| 9,729
|
py
|
Python
|
tests/em/static/test_IP_jvecjtvecadj.py
|
kimjaed/simpeg
|
b8d716f86a4ea07ba3085fabb24c2bc974788040
|
[
"MIT"
] | 3
|
2020-11-27T03:18:28.000Z
|
2022-03-18T01:29:58.000Z
|
tests/em/static/test_IP_jvecjtvecadj.py
|
kimjaed/simpeg
|
b8d716f86a4ea07ba3085fabb24c2bc974788040
|
[
"MIT"
] | null | null | null |
tests/em/static/test_IP_jvecjtvecadj.py
|
kimjaed/simpeg
|
b8d716f86a4ea07ba3085fabb24c2bc974788040
|
[
"MIT"
] | 1
|
2021-01-14T08:33:45.000Z
|
2021-01-14T08:33:45.000Z
|
from __future__ import print_function
import unittest
from SimPEG import Mesh
from SimPEG import Maps
from SimPEG import DataMisfit
from SimPEG import Regularization
from SimPEG import Optimization
from SimPEG import Inversion
from SimPEG import InvProblem
from SimPEG import Tests
import numpy as np
import SimPEG.EM.Static.DC as DC
import SimPEG.EM.Static.IP as IP
# Fix the global NumPy RNG seed so the random vectors drawn in the
# adjoint tests below are reproducible across runs.
np.random.seed(30)
class IPProblemTestsCC(unittest.TestCase):
    """Derivative and adjoint sanity checks for the cell-centered (CC)
    induced-polarization problem."""
    def setUp(self):
        # Wenner-array geometry: the survey extent follows from electrode
        # count and spacing; cell size is a quarter of the electrode spacing.
        aSpacing = 2.5
        nElecs = 5
        surveySize = nElecs * aSpacing - aSpacing
        cs = surveySize / nElecs / 4
        # 2-D tensor mesh with geometrically expanding (1.3x) padding cells.
        mesh = Mesh.TensorMesh([
            [(cs, 10, -1.3), (cs, surveySize/cs), (cs, 10, 1.3)],
            [(cs, 3, -1.3), (cs, 3, 1.3)],
            # [(cs, 5, -1.3), (cs, 10)]
        ], 'CN')
        srcList = DC.Utils.WennerSrcList(nElecs, aSpacing, in2D=True)
        survey = IP.Survey(srcList)
        sigma = np.ones(mesh.nC)
        problem = IP.Problem3D_CC(
            mesh, sigma=sigma, etaMap=Maps.IdentityMap(mesh)
        )
        problem.pair(survey)
        # Uniform chargeability model; also reused as the reference model m0.
        mSynth = np.ones(mesh.nC)*0.1
        survey.makeSyntheticData(mSynth)
        # Now set up the problem to do some minimization
        dmis = DataMisfit.l2_DataMisfit(survey)
        reg = Regularization.Tikhonov(mesh)
        opt = Optimization.InexactGaussNewton(
            maxIterLS=20, maxIter=10, tolF=1e-6,
            tolX=1e-6, tolG=1e-6, maxIterCG=6
        )
        invProb = InvProblem.BaseInvProblem(dmis, reg, opt, beta=1e4)
        inv = Inversion.BaseInversion(invProb)
        self.inv = inv
        self.reg = reg
        self.p = problem
        self.mesh = mesh
        self.m0 = mSynth
        self.survey = survey
        self.dmis = dmis
    def test_misfit(self):
        # Finite-difference check that Jvec is the derivative of dpred.
        passed = Tests.checkDerivative(
            lambda m: [
                self.survey.dpred(m), lambda mx: self.p.Jvec(self.m0, mx)
            ],
            self.m0,
            plotIt=False,
            num=3
        )
        self.assertTrue(passed)
    def test_adjoint(self):
        # Adjoint Test
        # u = np.random.rand(self.mesh.nC*self.survey.nSrc)
        # Verify <w, J v> == <v, J^T w> for random vectors v and w.
        v = np.random.rand(self.mesh.nC)
        w = np.random.rand(self.survey.dobs.shape[0])
        wtJv = w.dot(self.p.Jvec(self.m0, v))
        vtJtw = v.dot(self.p.Jtvec(self.m0, w))
        passed = np.abs(wtJv - vtJtw) < 1e-10
        print('Adjoint Test', np.abs(wtJv - vtJtw), passed)
        self.assertTrue(passed)
    def test_dataObj(self):
        # Finite-difference check of the data-misfit gradient.
        passed = Tests.checkDerivative(
            lambda m: [self.dmis(m), self.dmis.deriv(m)],
            self.m0,
            plotIt=False,
            num=3
        )
        self.assertTrue(passed)
class IPProblemTestsN(unittest.TestCase):
    """Derivative and adjoint sanity checks for the nodal (N) formulation of
    the induced-polarization problem; mirrors IPProblemTestsCC."""
    def setUp(self):
        # Wenner-array geometry: the survey extent follows from electrode
        # count and spacing; cell size is a quarter of the electrode spacing.
        aSpacing = 2.5
        nElecs = 5
        surveySize = nElecs * aSpacing - aSpacing
        cs = surveySize / nElecs / 4
        # 2-D tensor mesh with geometrically expanding (1.3x) padding cells.
        mesh = Mesh.TensorMesh([
            [(cs, 10, -1.3), (cs, surveySize/cs), (cs, 10, 1.3)],
            [(cs, 3, -1.3), (cs, 3, 1.3)],
            # [(cs, 5, -1.3), (cs, 10)]
        ], 'CN')
        srcList = DC.Utils.WennerSrcList(nElecs, aSpacing, in2D=True)
        survey = IP.Survey(srcList)
        sigma = np.ones(mesh.nC)
        problem = IP.Problem3D_N(
            mesh, sigma=sigma, etaMap=Maps.IdentityMap(mesh)
        )
        problem.pair(survey)
        # Uniform chargeability model; also reused as the reference model m0.
        mSynth = np.ones(mesh.nC)*0.1
        survey.makeSyntheticData(mSynth)
        # Now set up the problem to do some minimization
        dmis = DataMisfit.l2_DataMisfit(survey)
        reg = Regularization.Tikhonov(mesh)
        opt = Optimization.InexactGaussNewton(
            maxIterLS=20, maxIter=10, tolF=1e-6,
            tolX=1e-6, tolG=1e-6, maxIterCG=6
        )
        invProb = InvProblem.BaseInvProblem(dmis, reg, opt, beta=1e4)
        inv = Inversion.BaseInversion(invProb)
        self.inv = inv
        self.reg = reg
        self.p = problem
        self.mesh = mesh
        self.m0 = mSynth
        self.survey = survey
        self.dmis = dmis
    def test_misfit(self):
        # Finite-difference check that Jvec is the derivative of dpred.
        passed = Tests.checkDerivative(
            lambda m: [
                self.survey.dpred(m), lambda mx: self.p.Jvec(self.m0, mx)
            ],
            self.m0,
            plotIt=False,
            num=3
        )
        self.assertTrue(passed)
    def test_adjoint(self):
        # Adjoint Test
        # u = np.random.rand(self.mesh.nC*self.survey.nSrc)
        # Verify <w, J v> == <v, J^T w>; tolerance is looser (1e-8) than
        # the CC case (1e-10) for the nodal formulation.
        v = np.random.rand(self.mesh.nC)
        w = np.random.rand(self.survey.dobs.shape[0])
        wtJv = w.dot(self.p.Jvec(self.m0, v))
        vtJtw = v.dot(self.p.Jtvec(self.m0, w))
        passed = np.abs(wtJv - vtJtw) < 1e-8
        print('Adjoint Test', np.abs(wtJv - vtJtw), passed)
        self.assertTrue(passed)
    def test_dataObj(self):
        # Finite-difference check of the data-misfit gradient.
        passed = Tests.checkDerivative(
            lambda m: [self.dmis(m), self.dmis.deriv(m)],
            self.m0,
            plotIt=False,
            num=3
        )
        self.assertTrue(passed)
class IPProblemTestsCC_storeJ(unittest.TestCase):
    """Same checks as IPProblemTestsCC but with the sensitivity matrix
    explicitly stored (storeJ=True) instead of applied matrix-free."""
    def setUp(self):
        # Wenner-array geometry: the survey extent follows from electrode
        # count and spacing; cell size is a quarter of the electrode spacing.
        aSpacing = 2.5
        nElecs = 5
        surveySize = nElecs * aSpacing - aSpacing
        cs = surveySize / nElecs / 4
        # 2-D tensor mesh with geometrically expanding (1.3x) padding cells.
        mesh = Mesh.TensorMesh([
            [(cs, 10, -1.3), (cs, surveySize/cs), (cs, 10, 1.3)],
            [(cs, 3, -1.3), (cs, 3, 1.3)],
            # [(cs, 5, -1.3), (cs, 10)]
        ], 'CN')
        srcList = DC.Utils.WennerSrcList(nElecs, aSpacing, in2D=True)
        survey = IP.Survey(srcList)
        sigma = np.ones(mesh.nC)
        problem = IP.Problem3D_CC(
            mesh, sigma=sigma, etaMap=Maps.IdentityMap(mesh), storeJ=True
        )
        problem.pair(survey)
        # Uniform chargeability model; also reused as the reference model m0.
        mSynth = np.ones(mesh.nC)*0.1
        survey.makeSyntheticData(mSynth)
        # Now set up the problem to do some minimization
        dmis = DataMisfit.l2_DataMisfit(survey)
        reg = Regularization.Tikhonov(mesh)
        opt = Optimization.InexactGaussNewton(
            maxIterLS=20, maxIter=10, tolF=1e-6,
            tolX=1e-6, tolG=1e-6, maxIterCG=6
        )
        invProb = InvProblem.BaseInvProblem(dmis, reg, opt, beta=1e4)
        inv = Inversion.BaseInversion(invProb)
        self.inv = inv
        self.reg = reg
        self.p = problem
        self.mesh = mesh
        self.m0 = mSynth
        self.survey = survey
        self.dmis = dmis
    def test_misfit(self):
        # Finite-difference check that Jvec is the derivative of dpred.
        passed = Tests.checkDerivative(
            lambda m: [
                self.survey.dpred(m), lambda mx: self.p.Jvec(self.m0, mx)
            ],
            self.m0,
            plotIt=False,
            num=3
        )
        self.assertTrue(passed)
    def test_adjoint(self):
        # Adjoint Test
        # u = np.random.rand(self.mesh.nC*self.survey.nSrc)
        # Verify <w, J v> == <v, J^T w> for random vectors v and w.
        v = np.random.rand(self.mesh.nC)
        w = np.random.rand(self.survey.dobs.shape[0])
        wtJv = w.dot(self.p.Jvec(self.m0, v))
        vtJtw = v.dot(self.p.Jtvec(self.m0, w))
        passed = np.abs(wtJv - vtJtw) < 1e-10
        print('Adjoint Test', np.abs(wtJv - vtJtw), passed)
        self.assertTrue(passed)
    def test_dataObj(self):
        # Finite-difference check of the data-misfit gradient.
        passed = Tests.checkDerivative(
            lambda m: [self.dmis(m), self.dmis.deriv(m)],
            self.m0,
            plotIt=False,
            num=3
        )
        self.assertTrue(passed)
class IPProblemTestsN_storeJ(unittest.TestCase):
    """Same checks as IPProblemTestsN but with the sensitivity matrix
    explicitly stored (storeJ=True) instead of applied matrix-free."""
    def setUp(self):
        # Wenner-array geometry: the survey extent follows from electrode
        # count and spacing; cell size is a quarter of the electrode spacing.
        aSpacing = 2.5
        nElecs = 5
        surveySize = nElecs * aSpacing - aSpacing
        cs = surveySize / nElecs / 4
        # 2-D tensor mesh with geometrically expanding (1.3x) padding cells.
        mesh = Mesh.TensorMesh([
            [(cs, 10, -1.3), (cs, surveySize/cs), (cs, 10, 1.3)],
            [(cs, 3, -1.3), (cs, 3, 1.3)],
            # [(cs, 5, -1.3), (cs, 10)]
        ], 'CN')
        srcList = DC.Utils.WennerSrcList(nElecs, aSpacing, in2D=True)
        survey = IP.Survey(srcList)
        sigma = np.ones(mesh.nC)
        problem = IP.Problem3D_N(
            mesh, sigma=sigma, etaMap=Maps.IdentityMap(mesh), storeJ=True
        )
        problem.pair(survey)
        # Uniform chargeability model; also reused as the reference model m0.
        mSynth = np.ones(mesh.nC)*0.1
        survey.makeSyntheticData(mSynth)
        # Now set up the problem to do some minimization
        dmis = DataMisfit.l2_DataMisfit(survey)
        reg = Regularization.Tikhonov(mesh)
        opt = Optimization.InexactGaussNewton(
            maxIterLS=20, maxIter=10, tolF=1e-6,
            tolX=1e-6, tolG=1e-6, maxIterCG=6
        )
        invProb = InvProblem.BaseInvProblem(dmis, reg, opt, beta=1e4)
        inv = Inversion.BaseInversion(invProb)
        self.inv = inv
        self.reg = reg
        self.p = problem
        self.mesh = mesh
        self.m0 = mSynth
        self.survey = survey
        self.dmis = dmis
    def test_misfit(self):
        # Finite-difference check that Jvec is the derivative of dpred.
        passed = Tests.checkDerivative(
            lambda m: [
                self.survey.dpred(m), lambda mx: self.p.Jvec(self.m0, mx)
            ],
            self.m0,
            plotIt=False,
            num=3
        )
        self.assertTrue(passed)
    def test_adjoint(self):
        # Adjoint Test
        # u = np.random.rand(self.mesh.nC*self.survey.nSrc)
        # Verify <w, J v> == <v, J^T w>; tolerance is looser (1e-8) than
        # the CC case (1e-10) for the nodal formulation.
        v = np.random.rand(self.mesh.nC)
        w = np.random.rand(self.survey.dobs.shape[0])
        wtJv = w.dot(self.p.Jvec(self.m0, v))
        vtJtw = v.dot(self.p.Jtvec(self.m0, w))
        passed = np.abs(wtJv - vtJtw) < 1e-8
        print('Adjoint Test', np.abs(wtJv - vtJtw), passed)
        self.assertTrue(passed)
    def test_dataObj(self):
        # Finite-difference check of the data-misfit gradient.
        passed = Tests.checkDerivative(
            lambda m: [self.dmis(m), self.dmis.deriv(m)],
            self.m0,
            plotIt=False,
            num=3
        )
        self.assertTrue(passed)
# Run the full derivative/adjoint test suite when executed as a script.
if __name__ == '__main__':
    unittest.main()
| 30.498433
| 73
| 0.551239
| 1,200
| 9,729
| 4.44
| 0.105833
| 0.027027
| 0.015015
| 0.036036
| 0.928866
| 0.928866
| 0.928866
| 0.928866
| 0.928866
| 0.928866
| 0
| 0.032253
| 0.324391
| 9,729
| 318
| 74
| 30.59434
| 0.778336
| 0.055813
| 0
| 0.859375
| 0
| 0
| 0.00698
| 0
| 0
| 0
| 0
| 0
| 0.046875
| 1
| 0.0625
| false
| 0.109375
| 0.050781
| 0
| 0.128906
| 0.019531
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
4e8c5f5d2a77cbd104979bcf7a6e4e29d8276b9a
| 3,070
|
py
|
Python
|
spaceplanner/migrations/0002_auto_20200724_1236.py
|
prznoc/osplanner
|
c58ff129fde3f1513738cf27f9d3692fb7d549ea
|
[
"MIT"
] | null | null | null |
spaceplanner/migrations/0002_auto_20200724_1236.py
|
prznoc/osplanner
|
c58ff129fde3f1513738cf27f9d3692fb7d549ea
|
[
"MIT"
] | null | null | null |
spaceplanner/migrations/0002_auto_20200724_1236.py
|
prznoc/osplanner
|
c58ff129fde3f1513738cf27f9d3692fb7d549ea
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.2.5 on 2020-07-24 10:36
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: adds verbose names to the boolean
    preference flags and 0-3 MinValue/MaxValue validators to the integer
    priority fields on employee and workstation preferences."""
    dependencies = [
        ('spaceplanner', '0001_initial'),
    ]
    operations = [
        # Employee preferences: each feature flag plus its 0-3 priority.
        migrations.AlterField(
            model_name='employeepreferences',
            name='is_mac',
            field=models.BooleanField(default=False, verbose_name='Mac'),
        ),
        migrations.AlterField(
            model_name='employeepreferences',
            name='is_mac_preference',
            field=models.IntegerField(default=0, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(3)], verbose_name='Mac priority'),
        ),
        migrations.AlterField(
            model_name='employeepreferences',
            name='large_screen',
            field=models.BooleanField(default=False, verbose_name='Large screen'),
        ),
        migrations.AlterField(
            model_name='employeepreferences',
            name='large_screen_preference',
            field=models.IntegerField(default=0, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(3)], verbose_name='Large_screen priority'),
        ),
        migrations.AlterField(
            model_name='employeepreferences',
            name='noise',
            field=models.BooleanField(default=False, verbose_name='Noise'),
        ),
        migrations.AlterField(
            model_name='employeepreferences',
            name='noise_preference',
            field=models.IntegerField(default=0, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(3)], verbose_name='Noise priority'),
        ),
        migrations.AlterField(
            model_name='employeepreferences',
            name='window',
            field=models.BooleanField(default=False, verbose_name='Window'),
        ),
        migrations.AlterField(
            model_name='employeepreferences',
            name='window_preference',
            field=models.IntegerField(default=0, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(3)], verbose_name='Window priority'),
        ),
        # Workstation preferences: feature flags only (no priority fields).
        migrations.AlterField(
            model_name='workstationpreferences',
            name='is_mac',
            field=models.BooleanField(default=False, verbose_name='Mac'),
        ),
        migrations.AlterField(
            model_name='workstationpreferences',
            name='large_screen',
            field=models.BooleanField(default=False, verbose_name='Large_screen'),
        ),
        migrations.AlterField(
            model_name='workstationpreferences',
            name='noise',
            field=models.BooleanField(default=False, verbose_name='Noise'),
        ),
        migrations.AlterField(
            model_name='workstationpreferences',
            name='window',
            field=models.BooleanField(default=False, verbose_name='Window'),
        ),
    ]
| 40.933333
| 190
| 0.636156
| 272
| 3,070
| 7.044118
| 0.176471
| 0.125261
| 0.156576
| 0.181628
| 0.897182
| 0.893006
| 0.831942
| 0.724426
| 0.641962
| 0.641962
| 0
| 0.013449
| 0.249186
| 3,070
| 74
| 191
| 41.486486
| 0.817787
| 0.014658
| 0
| 0.735294
| 1
| 0
| 0.168376
| 0.036718
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.029412
| 0
| 0.073529
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4eabf2a813ce16099ad8420bfba6309ff1b07a0c
| 10,467
|
py
|
Python
|
listshuffler-be/tests/integration/test_shuffle.py
|
csiztom/listshuffler
|
d4fea7da3d506e93cca4a8e79c6f0ba98aebbac5
|
[
"MIT"
] | null | null | null |
listshuffler-be/tests/integration/test_shuffle.py
|
csiztom/listshuffler
|
d4fea7da3d506e93cca4a8e79c6f0ba98aebbac5
|
[
"MIT"
] | null | null | null |
listshuffler-be/tests/integration/test_shuffle.py
|
csiztom/listshuffler
|
d4fea7da3d506e93cca4a8e79c6f0ba98aebbac5
|
[
"MIT"
] | null | null | null |
from lambda_client import lambda_client
import hashlib
import json
from unittest import TestCase
def convert_to_hash(val, mul=0):
    """Return the SHA-384 digest of ``val + str(mul)`` as a decimal string.

    Mirrors the hashing scheme the backend uses to publish shuffle pairs
    without exposing raw list-item IDs.
    """
    digest = hashlib.sha384((val + str(mul)).encode()).hexdigest()
    return str(int(digest, 16))
class TestShuffle(TestCase):
    """Integration tests for the shuffle lambda pipeline.

    Each test creates an instance, populates lists/list items through the
    lambda endpoints, triggers a shuffle, and checks the hashed pairs
    published for a list item. The original invoke/parse/assert sequence
    was copy-pasted ~20 times; it is factored into ``_invoke`` and
    ``_create_list`` below.
    """

    def _invoke(self, client, function_name, params=None,
                expected_status=200):
        """Invoke a lambda with the given query-string params, assert its
        statusCode, and return the parsed response payload dict."""
        response = client.invoke(
            FunctionName=function_name,
            Payload=json.dumps(
                {"queryStringParameters": params, "body": None}))
        payload = json.loads(response['Payload'].read())
        assert payload['statusCode'] == expected_status
        return payload

    def _create_list(self, client, admin_id, multiplicity=1):
        """Create a list (empty name) plus one empty list item;
        return (list_id, listitem_id)."""
        payload = self._invoke(client, "testPostList",
                               {'adminID': admin_id, 'listName': '',
                                'multiplicity': multiplicity})
        list_id = json.loads(payload['body'])['listID']
        payload = self._invoke(client, "testPostListitem",
                               {'listID': list_id, 'listItem': ''})
        listitem_id = json.loads(payload['body'])['listItemID']
        return list_id, listitem_id

    def test_simple(self):
        # Two single-multiplicity lists: item 1 must be paired with item 2.
        client = lambda_client()
        payload = self._invoke(client, "testPostInstance")
        admin_id = json.loads(payload['body'])['adminID']
        list_id1, listitem_id1 = self._create_list(client, admin_id)
        list_id2, listitem_id2 = self._create_list(client, admin_id)
        self._invoke(client, "testPatchInstance",
                     {'adminID': admin_id, 'shuffledID': list_id1})
        self._invoke(client, "testPatchShuffle", {'adminID': admin_id})
        payload = self._invoke(client, "testGetListitem",
                               {'listItemID': listitem_id1})
        pairs = json.loads(payload['body'])['pairs']
        assert convert_to_hash(listitem_id2) in pairs

    def test_complex(self):
        # Three lists, the middle one with multiplicity 2: item 1 pairs
        # with item 2 twice (mul 0 and 1) and with item 3 once.
        client = lambda_client()
        payload = self._invoke(client, "testPostInstance")
        admin_id = json.loads(payload['body'])['adminID']
        list_id1, listitem_id1 = self._create_list(client, admin_id)
        list_id2, listitem_id2 = self._create_list(client, admin_id,
                                                   multiplicity=2)
        list_id3, listitem_id3 = self._create_list(client, admin_id)
        self._invoke(client, "testPatchInstance",
                     {'adminID': admin_id, 'shuffledID': list_id1})
        self._invoke(client, "testPatchShuffle", {'adminID': admin_id})
        payload = self._invoke(client, "testGetListitem",
                               {'listItemID': listitem_id1})
        pairs = json.loads(payload['body'])['pairs']
        assert convert_to_hash(listitem_id2) in pairs
        assert convert_to_hash(listitem_id2, 1) in pairs
        assert convert_to_hash(listitem_id3) in pairs

    def test_zero_prob(self):
        # Zeroing the only pairing probability makes the shuffle
        # infeasible: PatchShuffle returns 400 and no pair is published.
        client = lambda_client()
        payload = self._invoke(client, "testPostInstance")
        admin_id = json.loads(payload['body'])['adminID']
        list_id1, listitem_id1 = self._create_list(client, admin_id)
        list_id2, listitem_id2 = self._create_list(client, admin_id)
        payload = self._invoke(client, "testGetProbabilities",
                               {'adminID': admin_id, 'listID': list_id1})
        probs = json.loads(payload['body'])['probabilities']
        probs[listitem_id1][listitem_id2] = 0
        self._invoke(client, "testPatchProbabilities",
                     {'adminID': admin_id, 'listID': list_id1,
                      'probabilities': probs})
        self._invoke(client, "testPatchInstance",
                     {'adminID': admin_id, 'shuffledID': list_id1})
        self._invoke(client, "testPatchShuffle", {'adminID': admin_id},
                     expected_status=400)
        payload = self._invoke(client, "testGetListitem",
                               {'listItemID': listitem_id1})
        pairs = json.loads(payload['body'])['pairs']
        assert convert_to_hash(listitem_id2) not in pairs
| 58.803371
| 160
| 0.60256
| 961
| 10,467
| 6.484912
| 0.074922
| 0.098845
| 0.089859
| 0.143774
| 0.938864
| 0.937901
| 0.933087
| 0.933087
| 0.926508
| 0.926508
| 0
| 0.016951
| 0.239133
| 10,467
| 177
| 161
| 59.135593
| 0.76557
| 0
| 0
| 0.844156
| 0
| 0
| 0.219452
| 0.058278
| 0
| 0
| 0
| 0
| 0.214286
| 1
| 0.025974
| false
| 0
| 0.025974
| 0.006494
| 0.064935
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
14cd7ecbaab70810f90e489e39b3d631799af049
| 211
|
py
|
Python
|
students/K33401/Goncharov_Vladimir/Lr2/django_project_goncharov/project_first_app/admin.py
|
ShubhamKunal/ITMO_ICT_WebDevelopment_2020-2021
|
bb91c91a56d21cec2b12ae4cc722eaa652a88420
|
[
"MIT"
] | 4
|
2020-09-03T15:41:42.000Z
|
2021-12-24T15:28:20.000Z
|
students/K33401/Goncharov_Vladimir/Lr2/django_project_goncharov/project_first_app/admin.py
|
ShubhamKunal/ITMO_ICT_WebDevelopment_2020-2021
|
bb91c91a56d21cec2b12ae4cc722eaa652a88420
|
[
"MIT"
] | 48
|
2020-09-13T20:22:42.000Z
|
2021-04-30T11:13:30.000Z
|
students/K33401/Goncharov_Vladimir/Lr2/django_project_goncharov/project_first_app/admin.py
|
ShubhamKunal/ITMO_ICT_WebDevelopment_2020-2021
|
bb91c91a56d21cec2b12ae4cc722eaa652a88420
|
[
"MIT"
] | 69
|
2020-09-06T10:32:37.000Z
|
2021-11-28T18:13:17.000Z
|
from django.contrib import admin
from .models import Driver, Car, DriverLicense, Ownership
# Expose the car-ownership models in the Django admin interface,
# registered in the same order as before.
for _model in (Driver, Car, Ownership, DriverLicense):
    admin.site.register(_model)
| 26.375
| 57
| 0.824645
| 28
| 211
| 6.214286
| 0.428571
| 0.206897
| 0.390805
| 0.298851
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075829
| 211
| 8
| 58
| 26.375
| 0.892308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
14cec6b306f93521b43a1b811aa009b3c7256ba7
| 37
|
py
|
Python
|
biovis_report/version.py
|
go-choppy/choppy-report
|
f233ba3b2eaaa9af8936b736ede25233a043dde5
|
[
"MIT"
] | 1
|
2019-07-02T08:37:30.000Z
|
2019-07-02T08:37:30.000Z
|
biovis_report/version.py
|
go-choppy/choppy-report
|
f233ba3b2eaaa9af8936b736ede25233a043dde5
|
[
"MIT"
] | null | null | null |
biovis_report/version.py
|
go-choppy/choppy-report
|
f233ba3b2eaaa9af8936b736ede25233a043dde5
|
[
"MIT"
] | null | null | null |
def get_version():
    """Return the biovis_report package version string."""
    return '0.5.6'
| 18.5
| 18
| 0.621622
| 7
| 37
| 3.142857
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 0.189189
| 37
| 2
| 19
| 18.5
| 0.633333
| 0
| 0
| 0
| 0
| 0
| 0.131579
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
0912f1de15ea2d8b956ad089f1cafac5ddc22063
| 91,026
|
py
|
Python
|
declaraciones/declaracion/migrations/0001_initial.py
|
gob-cdmx/declaraciones
|
90347c1572fa5b8137c5e0d23e6a7c6b2a0b2311
|
[
"MIT"
] | 2
|
2019-10-17T02:40:12.000Z
|
2019-10-17T22:51:36.000Z
|
declaraciones/declaracion/migrations/0001_initial.py
|
gob-cdmx/declaraciones
|
90347c1572fa5b8137c5e0d23e6a7c6b2a0b2311
|
[
"MIT"
] | 1
|
2019-10-02T20:23:12.000Z
|
2019-10-02T20:23:12.000Z
|
declaraciones/declaracion/migrations/0001_initial.py
|
gob-cdmx/declaraciones
|
90347c1572fa5b8137c5e0d23e6a7c6b2a0b2311
|
[
"MIT"
] | 4
|
2019-08-20T21:16:04.000Z
|
2021-07-01T03:08:10.000Z
|
# Generated by Django 2.2 on 2019-04-01 19:39
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import mptt.fields
import uuid
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='ActivosBienes',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('id_activobien', models.IntegerField(null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
],
),
migrations.CreateModel(
name='CatActivoBien',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('activo_bien', models.CharField(max_length=45)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatAmbitosLaborales',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('ambito_laboral', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatDocumentosObtenidos',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('documento_obtenido', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatEntesPublicos',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('ente_publico', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatEntidadesFederativas',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('entidad_federativa', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatEstadosCiviles',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('estado_civil', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatEstatusDeclaracion',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('estatus_declaracion', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatEstatusEstudios',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('estatus_estudios', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatFormasAdquisiciones',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('forma_adquisicion', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatFuncionesPrincipales',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('funcion', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatGradosAcademicos',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('grado_academico', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatMonedas',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('moneda', models.CharField(max_length=255)),
('moneda_abrev', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatNaturalezaMembresia',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('naturaleza_membresia', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatOrdenesGobierno',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('orden_gobierno', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatPaises',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('pais', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(max_length=3)),
],
),
migrations.CreateModel(
name='CatPoderes',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('poder', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatRegimenesMatrimoniales',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('regimen_matrimonial', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatSectoresIndustria',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('sector_industria', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatTipoParticipacion',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tipo_participacion', models.CharField(blank=True, max_length=45, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatTipoPersona',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tipo_persona', models.CharField(blank=True, max_length=45, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatTiposAcreedores',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tipo_acreedor', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatTiposActividad',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tipo_actividad', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatTiposAdeudos',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tipo_adeudo', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatTiposApoyos',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tipo_apoyo', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatTiposBeneficios',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tipo_beneficio', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatTiposBienes',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tipo_bien', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatTiposDeclaracion',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tipo_declaracion', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatTiposEspecificosInversiones',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tipo_especifico_inversion', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatTiposFideicomisos',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tipo_fideicomiso', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatTiposIngresosVarios',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tipo_ingreso_varios', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatTiposInmuebles',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tipo_inmueble', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatTiposInstituciones',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tipo_institucion', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatTiposInversiones',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tipo_inversion', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatTiposMetales',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tipo_metal', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatTiposMuebles',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tipo_mueble', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatTiposOperaciones',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tipo_operacion', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatTiposPasivos',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tipo_pasivo', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatTiposRelacionesPersonales',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tipo_relacion', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('grupo_familia', models.IntegerField(blank=True, null=True)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatTiposRepresentaciones',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tipo_representacion', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatTiposTitulares',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tipo_titular', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatTipoVia',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tipo_via', models.CharField(blank=True, max_length=45, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatTitularTiposRelaciones',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tipo_relacion', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='CatUnidadesTemporales',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('unidad_temporal', models.CharField(blank=True, max_length=45, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('default', models.IntegerField(default=0)),
('orden', models.IntegerField(default=1)),
('codigo', models.CharField(blank=True, max_length=45)),
],
),
migrations.CreateModel(
name='Declaraciones',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('folio', models.UUIDField(default=uuid.uuid4, editable=False)),
('fecha_recepcion', models.DateField(blank=True, null=True)),
('fecha_declaracion', models.DateField(blank=True, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('cat_tipos_declaracion', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposDeclaracion')),
],
),
migrations.CreateModel(
name='Domicilios',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('municipio', models.CharField(blank=True, max_length=255)),
('cp', models.CharField(blank=True, max_length=255)),
('colonia', models.CharField(blank=True, max_length=255)),
('nombre_via', models.CharField(blank=True, max_length=255)),
('num_exterior', models.CharField(blank=True, max_length=255)),
('num_interior', models.CharField(blank=True, max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('cat_entidades_federativas', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatEntidadesFederativas')),
('cat_pais', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatPaises')),
('cat_tipo_via', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTipoVia')),
],
),
migrations.CreateModel(
name='InfoPersonalVar',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('es_fisica', models.BooleanField(blank=True, default=None, null=True)),
('razon_social', models.CharField(blank=True, max_length=255, null=True)),
('nombres', models.CharField(blank=True, max_length=255)),
('apellido1', models.CharField(blank=True, max_length=255)),
('apellido2', models.CharField(blank=True, max_length=255)),
('curp', models.CharField(blank=True, max_length=255)),
('rfc', models.CharField(blank=True, max_length=255)),
('fecha_nacimiento', models.DateField(blank=True, null=True)),
('num_id_identificacion', models.CharField(blank=True, max_length=255)),
('email_personal', models.CharField(blank=True, max_length=255)),
('tel_particular', models.CharField(blank=True, max_length=255)),
('tel_movil', models.CharField(blank=True, max_length=255)),
('otro_sector', models.CharField(blank=True, max_length=255, null=True)),
('otro_estado_civil', models.CharField(blank=True, max_length=255, null=True)),
('actividad_economica', models.CharField(blank=True, max_length=255, null=True)),
('ocupacion_girocomercial', models.CharField(blank=True, max_length=255, null=True)),
('nombre_negocio', models.CharField(blank=True, max_length=255, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('cat_entidades_federativas', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatEntidadesFederativas')),
('cat_estados_civiles', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatEstadosCiviles')),
('cat_pais', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatPaises')),
('cat_regimenes_matrimoniales', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatRegimenesMatrimoniales')),
('cat_sectores_industria', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatSectoresIndustria')),
('cat_tipo_persona', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTipoPersona')),
('declaraciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Declaraciones')),
('domicilios', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Domicilios')),
],
),
migrations.CreateModel(
name='Observaciones',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('observacion', models.CharField(blank=True, max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
],
),
migrations.CreateModel(
name='SueldosPublicos',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('rfc', models.CharField(blank=True, max_length=255)),
('ingreso_bruto_anual', models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True)),
('duracion_dias', models.DecimalField(blank=True, decimal_places=2, max_digits=6, null=True)),
('duracion_meses', models.DecimalField(blank=True, decimal_places=2, max_digits=6, null=True)),
('duracion_anual', models.DecimalField(blank=True, decimal_places=2, max_digits=6, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('es_transaccion', models.BooleanField(blank=True, default=None, null=True)),
('fecha_transaccion', models.DateField(blank=True, null=True)),
('otro_ente', models.CharField(blank=True, max_length=255, null=True)),
('cat_entes_publicos', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatEntesPublicos')),
('cat_monedas', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatMonedas')),
('declaraciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Declaraciones')),
('observaciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Observaciones')),
],
),
migrations.CreateModel(
name='SociosComerciales',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('actividad_vinculante', models.CharField(blank=True, max_length=255)),
('tipo_vinculo', models.CharField(blank=True, max_length=255)),
('antiguedad_vinculo', models.CharField(blank=True, max_length=255)),
('rfc_entidad_vinculante', models.CharField(blank=True, max_length=255)),
('porcentaje_participacion', models.DecimalField(blank=True, decimal_places=2, max_digits=5, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('declaraciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Declaraciones')),
('observaciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Observaciones')),
('socio_infopersonalvar', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.InfoPersonalVar')),
],
),
migrations.CreateModel(
name='Secciones',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('seccion', models.CharField(max_length=255)),
('url', models.CharField(blank=True, max_length=255)),
('parametro', models.CharField(blank=True, default='', max_length=255)),
('order', models.IntegerField(default=1)),
('lft', models.PositiveIntegerField(db_index=True, editable=False)),
('rght', models.PositiveIntegerField(db_index=True, editable=False)),
('tree_id', models.PositiveIntegerField(db_index=True, editable=False)),
('level', models.PositiveIntegerField(db_index=True, editable=False)),
('parent', mptt.fields.TreeForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='children', to='declaracion.Secciones')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='SeccionDeclaracion',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('estatus', models.IntegerField(choices=[(0, 'Pendiente'), (1, 'Completa'), (2, 'Incompleta')], default=0)),
('aplica', models.BooleanField()),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('declaraciones', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='declaracion.Declaraciones')),
('observaciones', models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='declaracion.Observaciones', verbose_name='')),
('seccion', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='status', to='declaracion.Secciones')),
],
),
migrations.CreateModel(
name='Representaciones',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('otra_representacion', models.CharField(blank=True, max_length=255)),
('fecha_inicio', models.DateField(blank=True, null=True)),
('pagado', models.BooleanField(blank=True, default=None, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('es_representacion_activa', models.BooleanField(blank=True, default=None, null=True)),
('monto', models.DecimalField(blank=True, decimal_places=2, max_digits=13, null=True)),
('es_mueble', models.BooleanField(blank=True, default=None, null=True)),
('otro_mueble', models.CharField(blank=True, max_length=255, null=True)),
('cat_tipos_muebles', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposMuebles')),
('cat_tipos_representaciones', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposRepresentaciones')),
('declaraciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Declaraciones')),
('info_personal_var', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.InfoPersonalVar')),
('observaciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Observaciones')),
],
),
# OtrasPartes: other related parties of the declarant. Links the declarant and
# the other party through two FKs to InfoPersonalVar, distinguished by
# related_name ('declarante_otras_partes' / 'otraspartes_otras_partes').
# All FKs use DO_NOTHING, so referential integrity is left to the database.
migrations.CreateModel(
name='OtrasPartes',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('fecha_inicio_relacion', models.DateField(blank=True, null=True)),
('otra_relacion', models.CharField(blank=True, max_length=255)),
('ocupacion_profesion', models.CharField(blank=True, max_length=255)),
# Nullable boolean: None = unanswered, True/False = explicit answer.
('intereses_comunes', models.BooleanField(blank=True, default=None, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('cat_titular_tipos_relaciones', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTitularTiposRelaciones')),
('declaraciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Declaraciones')),
('declarante_infopersonalvar', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, related_name='declarante_otras_partes', to='declaracion.InfoPersonalVar')),
('observaciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Observaciones')),
('otraspartes_infopersonalvar', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, related_name='otraspartes_otras_partes', to='declaracion.InfoPersonalVar')),
],
),
# Nacionalidades: through-table joining InfoPersonalVar to CatPaises
# (used by the ManyToManyField added to infopersonalvar further below).
migrations.CreateModel(
name='Nacionalidades',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('cat_paises', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='nacionalidades_cat_paises', to='declaracion.CatPaises')),
('info_personal_var', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, related_name='nacionalidades_info_personal_var', to='declaracion.InfoPersonalVar')),
],
),
# MueblesNoRegistrables: non-registrable movable goods declared as assets.
# 'otro_*' CharFields hold free-text values when the matching catalog FK
# (cat_*) does not cover the declarant's answer.
migrations.CreateModel(
name='MueblesNoRegistrables',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('otra_operacion', models.CharField(blank=True, max_length=255)),
('otro_bien_mueble', models.CharField(blank=True, max_length=255)),
('descripcion_bien', models.CharField(blank=True, max_length=255)),
('otro_titular', models.CharField(blank=True, max_length=255)),
('otra_forma', models.CharField(blank=True, max_length=255)),
('fecha_adquisicion', models.DateField(blank=True, null=True)),
('precio_adquisicion', models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('activos_bienes', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.ActivosBienes')),
('cat_formas_adquisiciones', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatFormasAdquisiciones')),
('cat_monedas', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatMonedas')),
('cat_tipos_muebles', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposMuebles')),
('cat_tipos_operaciones', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposOperaciones')),
('cat_tipos_titulares', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposTitulares')),
('declaraciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Declaraciones')),
('observaciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Observaciones')),
],
),
# Membresias: memberships (institution, sector, role) held by the declarant.
migrations.CreateModel(
name='Membresias',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('otras_instituciones', models.CharField(blank=True, max_length=255)),
('nombre_institucion', models.CharField(blank=True, max_length=255)),
('otro_sector', models.CharField(blank=True, max_length=255)),
('puesto_rol', models.CharField(blank=True, max_length=255)),
('fecha_inicio', models.DateField(blank=True, null=True)),
# Nullable boolean: None = unanswered.
('pagado', models.BooleanField(blank=True, default=None, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('otra_naturaleza', models.CharField(blank=True, max_length=255, null=True)),
('cat_naturaleza_membresia', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatNaturalezaMembresia')),
('cat_sectores_industria', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatSectoresIndustria')),
('cat_tipos_instituciones', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposInstituciones')),
('declaraciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Declaraciones')),
('domicilios', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Domicilios')),
('observaciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Observaciones')),
],
),
# Inversiones: declared investments (account, amounts, interest rate, term).
migrations.CreateModel(
name='Inversiones',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('otra_operacion', models.CharField(blank=True, max_length=255)),
('otra_inversion', models.CharField(blank=True, max_length=255)),
('otro_tipo_especifico', models.CharField(blank=True, max_length=255)),
('num_cuenta', models.CharField(blank=True, max_length=255)),
('otra_forma', models.CharField(blank=True, max_length=255)),
('fecha_inicio', models.DateField(blank=True, null=True)),
('monto_original', models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True)),
('tasa_interes', models.DecimalField(blank=True, decimal_places=2, max_digits=5, null=True)),
# NOTE(review): saldo_actual has max_digits=10 while monto_original has 12 —
# looks inconsistent for the same currency scale; confirm intended.
('saldo_actual', models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True)),
('plazo', models.CharField(blank=True, max_length=255)),
# NOTE(review): unlike sibling models where cat_tipos_titulares is a FK to
# CatTiposTitulares, here it is a free-text CharField — confirm intended.
('cat_tipos_titulares', models.CharField(blank=True, max_length=255)),
('otro_tipo_titular', models.CharField(blank=True, max_length=255)),
('porcentaje_inversion', models.DecimalField(blank=True, decimal_places=2, max_digits=5, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('cat_formas_adquisiciones', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatFormasAdquisiciones')),
('cat_monedas', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatMonedas')),
('cat_paises', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatPaises')),
('cat_tipos_especificos_inversiones', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposEspecificosInversiones')),
('cat_tipos_inversiones', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposInversiones')),
('cat_tipos_operaciones', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposOperaciones')),
# Required FK (no null=True), unlike most catalog FKs in this model.
('cat_unidades_temporales', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatUnidadesTemporales')),
('declaraciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Declaraciones')),
('info_personal_var', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.InfoPersonalVar')),
('observaciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Observaciones')),
],
),
# IngresosVarios: miscellaneous income entries (gross annual amount, duration).
migrations.CreateModel(
name='IngresosVarios',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('ingreso_bruto_anual', models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True)),
('duracion_dias', models.DecimalField(blank=True, decimal_places=2, max_digits=6, null=True)),
('duracion_meses', models.DecimalField(blank=True, decimal_places=2, max_digits=6, null=True)),
('duracion_anual', models.DecimalField(blank=True, decimal_places=2, max_digits=6, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('otra_actividad', models.CharField(blank=True, max_length=255)),
# NOTE(review): 'es_transaccion' is a DateField despite the boolean-style
# 'es_' prefix used elsewhere (es_mueble below is a BooleanField) — confirm.
('es_transaccion', models.DateField(blank=True, null=True)),
('es_mueble', models.BooleanField(blank=True, default=None, null=True)),
('otro_mueble', models.CharField(blank=True, max_length=255, null=True)),
('descripcion_actividad', models.CharField(blank=True, max_length=255, null=True)),
('cat_monedas', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatMonedas')),
('cat_tipos_actividad', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposActividad')),
('cat_tipos_ingresos_varios', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposIngresosVarios')),
('cat_tipos_muebles', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposMuebles')),
('declaraciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Declaraciones')),
('info_personal_var', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.InfoPersonalVar')),
('observaciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Observaciones')),
],
),
# Wire InfoPersonalVar to CatPaises through the Nacionalidades join table.
# NOTE(review): related_name 'info_personal_var_nacionalidaes' misspells
# "nacionalidades"; it is frozen into the schema here, so renaming it would
# require a coordinated model + migration change — do not fix in place.
migrations.AddField(
model_name='infopersonalvar',
name='nacionalidades',
field=models.ManyToManyField(blank=True, related_name='info_personal_var_nacionalidaes', through='declaracion.Nacionalidades', to='declaracion.CatPaises'),
),
# Optional link from InfoPersonalVar to its Observaciones record.
migrations.AddField(
model_name='infopersonalvar',
name='observaciones',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Observaciones'),
),
# InfoPersonalFija: fixed personal data of the declarant (names, CURP, RFC),
# optionally linked to the Django auth user via settings.AUTH_USER_MODEL.
migrations.CreateModel(
name='InfoPersonalFija',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nombres', models.CharField(blank=True, max_length=255)),
('apellido1', models.CharField(blank=True, max_length=255)),
('apellido2', models.CharField(blank=True, max_length=255)),
('curp', models.CharField(blank=True, max_length=255)),
('rfc', models.CharField(blank=True, max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('fecha_nacimiento', models.DateField(blank=True, null=True)),
('otro_ente', models.CharField(blank=True, max_length=255, null=True)),
('fecha_inicio', models.DateField(blank=True, null=True)),
('cat_ente_publico', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatEntesPublicos')),
('cat_entidades_federativas', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatEntidadesFederativas')),
('cat_pais', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatPaises')),
('usuario', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL)),
],
),
# Fideicomisos: declared trusts (registration, value, fiduciary institution).
migrations.CreateModel(
name='Fideicomisos',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('otra_operacion', models.CharField(blank=True, max_length=255)),
('otro_fideicomiso', models.CharField(blank=True, max_length=255)),
('objetivo_fideicomiso', models.CharField(blank=True, max_length=255)),
('num_registro', models.CharField(blank=True, max_length=255)),
('fecha_creacion', models.DateField(blank=True, null=True)),
('plazo_vigencia', models.CharField(blank=True, max_length=255)),
('valor_fideicomiso', models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True)),
('ingreso_monetario', models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True)),
('institucion_fiduciaria', models.CharField(blank=True, max_length=255)),
('fideicomisario', models.CharField(blank=True, max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('activos_bienes', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.ActivosBienes')),
('cat_monedas', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatMonedas')),
('cat_paises', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatPaises')),
('cat_tipos_fideicomisos', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposFideicomisos')),
('cat_tipos_operaciones', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposOperaciones')),
('declaraciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Declaraciones')),
('observaciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Observaciones')),
],
),
# ExperienciaLaboral: work-history entries (institution, role, dates).
migrations.CreateModel(
name='ExperienciaLaboral',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('otro_poder', models.CharField(blank=True, max_length=255)),
('nombre_institucion', models.CharField(blank=True, max_length=255)),
('unidad_area_administrativa', models.CharField(blank=True, max_length=255)),
('otro_sector', models.CharField(blank=True, max_length=255)),
('jerarquia_rango', models.CharField(blank=True, max_length=255)),
('cargo_puesto', models.CharField(blank=True, max_length=255)),
('fecha_ingreso', models.DateField(blank=True, null=True)),
('fecha_salida', models.DateField(blank=True, null=True)),
('otra_funcion', models.CharField(blank=True, max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('otro_ambitos_laborales', models.CharField(blank=True, max_length=255)),
('cat_ambitos_laborales', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatAmbitosLaborales')),
('cat_funciones_principales', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatFuncionesPrincipales')),
('cat_ordenes_gobierno', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatOrdenesGobierno')),
('cat_poderes', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatPoderes')),
('cat_sectores_industria', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatSectoresIndustria')),
('declaraciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Declaraciones')),
('domicilios', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Domicilios')),
('observaciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Observaciones')),
],
),
# Encargos: the declarant's current public post/commission (position, dates,
# work contact details, government order/branch, sector).
migrations.CreateModel(
name='Encargos',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('empleo_cargo_comision', models.CharField(blank=True, max_length=255)),
# Nullable boolean: None = unanswered.
('honorarios', models.BooleanField(blank=True, default=None, null=True)),
('nivel_encargo', models.CharField(blank=True, max_length=255)),
('area_adscripcion', models.CharField(blank=True, max_length=255)),
('posesion_conclusion', models.DateField(blank=True, null=True)),
('telefono_laboral', models.CharField(blank=True, max_length=255)),
('email_laboral', models.CharField(blank=True, max_length=255)),
('otro_sector', models.CharField(blank=True, max_length=255)),
('otra_funcion', models.CharField(blank=True, max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('posesion_inicio', models.DateField(blank=True, null=True)),
('telefono_extension', models.CharField(blank=True, max_length=255)),
('otro_naturalezas_juridicas', models.CharField(blank=True, max_length=255)),
('otro_ente', models.CharField(blank=True, max_length=255, null=True)),
('cat_entes_publicos', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatEntesPublicos')),
('cat_funciones_principales', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatFuncionesPrincipales')),
('cat_ordenes_gobierno', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatOrdenesGobierno')),
('cat_paises', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatPaises')),
('cat_poderes', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatPoderes')),
('cat_sectores_industria', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatSectoresIndustria')),
('declaraciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Declaraciones')),
('domicilios', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Domicilios')),
('observaciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Observaciones')),
],
),
# EmpresasSociedades: companies/partnerships linked to the declarant. Two FKs
# to InfoPersonalVar distinguish the declarant from the company record
# (related_name 'declarante_empresas_sociedades' / 'empresa_empresas_sociedades').
migrations.CreateModel(
name='EmpresasSociedades',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('rol_empresa', models.CharField(blank=True, max_length=255)),
('actividad_economica', models.BooleanField(blank=True, default=None, null=True)),
('porcentaje_participacion', models.DecimalField(blank=True, decimal_places=2, max_digits=5, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('declaraciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Declaraciones')),
('declarante_infopersonalvar', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, related_name='declarante_empresas_sociedades', to='declaracion.InfoPersonalVar')),
('empresa_infopersonalvar', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, related_name='empresa_empresas_sociedades', to='declaracion.InfoPersonalVar')),
('observaciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Observaciones')),
],
),
# EfectivoMetales: cash and precious-metal holdings (amounts per type).
migrations.CreateModel(
name='EfectivoMetales',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('otro_tipo_operacion', models.CharField(blank=True, max_length=255)),
('monto_efectivo', models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True)),
('otro_metal', models.CharField(blank=True, max_length=255)),
('unidades', models.CharField(blank=True, max_length=255)),
('monto_metales', models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True)),
('otra_forma', models.CharField(blank=True, max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('cat_formas_adquisiciones', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatFormasAdquisiciones')),
('cat_monedas', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatMonedas')),
('cat_tipos_metales', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposMetales')),
('cat_tipos_operaciones', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposOperaciones')),
('declaraciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Declaraciones')),
('observaciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Observaciones')),
],
),
# DeudasOtros: other debts/liabilities (creditor, amounts, term, guarantees).
migrations.CreateModel(
name='DeudasOtros',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('otra_operacion', models.CharField(blank=True, max_length=255)),
('otro_tipo_acreedor', models.CharField(blank=True, max_length=255)),
('otro_tipo_adeudo', models.CharField(blank=True, max_length=255)),
('numero_cuenta', models.CharField(blank=True, max_length=255)),
('fecha_generacion', models.DateField(blank=True, null=True)),
('monto_original', models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True)),
('tasa_interes', models.DecimalField(blank=True, decimal_places=2, max_digits=5, null=True)),
('saldo_pendiente', models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True)),
('monto_abonado', models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True)),
('plazo', models.CharField(blank=True, max_length=255)),
('otro_titular', models.CharField(blank=True, max_length=255)),
('porcentaje_adeudo', models.DecimalField(blank=True, decimal_places=2, max_digits=5, null=True)),
('garantia', models.BooleanField(blank=True, default=None, null=True)),
('nombre_garantes', models.CharField(blank=True, max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
# Required FK: the creditor's InfoPersonalVar record (no related_name set).
('acreedor_infopersonalvar', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.InfoPersonalVar')),
('cat_monedas', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatMonedas')),
('cat_paises', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatPaises')),
('cat_tipos_acreedores', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposAcreedores')),
('cat_tipos_adeudos', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposAdeudos')),
('cat_tipos_operaciones', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposOperaciones')),
('cat_tipos_pasivos', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposPasivos')),
('cat_tipos_titulares', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposTitulares')),
('cat_unidades_temporales', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatUnidadesTemporales')),
('declaraciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Declaraciones')),
('observaciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Observaciones')),
],
),
# Required FK from Declaraciones to the declarant's fixed personal data;
# added after InfoPersonalFija's CreateModel so the target exists.
migrations.AddField(
model_name='declaraciones',
name='info_personal_fija',
field=models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.InfoPersonalFija'),
),
# DatosCurriculares: education history (institution, degree, credentials).
migrations.CreateModel(
name='DatosCurriculares',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('institucion_educativa', models.CharField(blank=True, max_length=255)),
('municipio', models.CharField(blank=True, max_length=255)),
('carrera_o_area', models.CharField(blank=True, max_length=255)),
('conclusion', models.CharField(blank=True, max_length=255)),
('cedula_profesional', models.CharField(blank=True, max_length=255)),
('diploma', models.CharField(blank=True, max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('cat_documentos_obtenidos', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatDocumentosObtenidos')),
('cat_entidades_federativas', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatEntidadesFederativas')),
('cat_estatus_estudios', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatEstatusEstudios')),
('cat_grados_academicos', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatGradosAcademicos')),
('cat_pais', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatPaises')),
('declaraciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Declaraciones')),
('observaciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Observaciones')),
],
),
# CuentasPorCobrar: accounts receivable / loans made by the declarant.
migrations.CreateModel(
name='CuentasPorCobrar',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('fecha_prestamo', models.DateField(blank=True, null=True)),
('monto_original', models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True)),
('tasa_interes', models.DecimalField(blank=True, decimal_places=2, max_digits=5, null=True)),
('saldo_pendiente', models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True)),
('fecha_vencimiento', models.DateField(blank=True, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('activos_bienes', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.ActivosBienes')),
('cat_monedas', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatMonedas')),
('declaraciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Declaraciones')),
('info_personal_var', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.InfoPersonalVar')),
('observaciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Observaciones')),
],
),
# ConyugeDependientes: spouse/dependents of the declarant. Two FKs to
# InfoPersonalVar distinguish declarant vs. dependent; note the related_names
# spell it 'conyugue' (non-standard for "conyuge") — frozen into the schema.
migrations.CreateModel(
name='ConyugeDependientes',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
# Nullable booleans below: None = unanswered, True/False = explicit answer.
('habita_domicilio', models.BooleanField(blank=True, default=None, null=True)),
('medio_contacto', models.CharField(blank=True, max_length=255)),
('ingresos_propios', models.BooleanField(blank=True, default=None, null=True)),
('ocupacion_profesion', models.CharField(blank=True, max_length=255)),
('otro_sector', models.CharField(blank=True, max_length=255)),
('proveedor_contratista', models.BooleanField(blank=True, default=None, null=True)),
('intereses_comunes', models.BooleanField(blank=True, default=None, null=True)),
('cabildeo_sector', models.BooleanField(blank=True, default=None, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('otra_relacion', models.CharField(blank=True, max_length=255)),
('cat_tipos_relaciones_personales', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposRelacionesPersonales')),
('declaraciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Declaraciones')),
('declarante_infopersonalvar', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, related_name='declarante_conyugue_dependientes', to='declaracion.InfoPersonalVar')),
('dependiente_infopersonalvar', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, related_name='dependiente_conyugue_dependientes', to='declaracion.InfoPersonalVar')),
('observaciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Observaciones')),
],
),
# ClientesPrincipales: main clients and their share of the declarant's billing.
migrations.CreateModel(
name='ClientesPrincipales',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('porcentaje_facturacion_cliente', models.DecimalField(blank=True, decimal_places=2, max_digits=5, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('nombre_encargado', models.CharField(blank=True, max_length=255, null=True)),
('declaraciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Declaraciones')),
('info_personal_var', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.InfoPersonalVar')),
('observaciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Observaciones')),
],
),
# BienesPersonas: links an asset (ActivosBienes) to a person with ownership
# share and relationship type.
migrations.CreateModel(
name='BienesPersonas',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('porcentaje', models.DecimalField(blank=True, decimal_places=2, max_digits=5, null=True)),
('es_propietario', models.BooleanField(blank=True, default=None, null=True)),
# NOTE(review): 'precio_adquision' misspells "precio_adquisicion" (the
# spelling used by sibling models); the column name is frozen in the schema,
# so it cannot be corrected in this migration alone.
('precio_adquision', models.DecimalField(blank=True, decimal_places=2, max_digits=13, null=True)),
('el_adquirio', models.BooleanField(blank=True, default=None, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('otra_relacion', models.CharField(blank=True, max_length=255, null=True)),
('bien_activosbienes', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.ActivosBienes')),
('cat_tipo_participacion', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTipoParticipacion')),
('otra_persona', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='otra_persona', to='declaracion.InfoPersonalVar')),
('propietario_infopersonalvar', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.InfoPersonalVar')),
('tipo_relacion', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposRelacionesPersonales')),
],
),
# BienesMuebles: registrable movable goods (e.g. vehicles: make, model,
# serial and vehicle-registry numbers, acquisition details).
migrations.CreateModel(
name='BienesMuebles',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('otra_operacion', models.CharField(blank=True, max_length=255)),
('otro_tipo_mueble', models.CharField(blank=True, max_length=255)),
('marca', models.CharField(blank=True, max_length=255)),
('submarca', models.CharField(blank=True, max_length=255)),
# Integer year of the vehicle model, not a string.
('modelo', models.IntegerField(blank=True, null=True)),
('num_serie', models.CharField(blank=True, max_length=255)),
('otro_titular', models.CharField(blank=True, max_length=255)),
('num_registro_vehicular', models.CharField(blank=True, max_length=255)),
('otra_forma', models.CharField(blank=True, max_length=255)),
('otro_sector', models.CharField(blank=True, max_length=255)),
('fecha_adquisicion', models.DateField(blank=True, null=True)),
('precio_adquisicion', models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('activos_bienes', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.ActivosBienes')),
('cat_entidades_federativas', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatEntidadesFederativas')),
('cat_formas_adquisiciones', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatFormasAdquisiciones')),
('cat_monedas', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatMonedas')),
('cat_paises', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatPaises')),
('cat_tipos_muebles', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposMuebles')),
('cat_tipos_operaciones', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposOperaciones')),
('cat_tipos_titulares', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposTitulares')),
('declaraciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Declaraciones')),
('observaciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Observaciones')),
],
),
migrations.CreateModel(
name='BienesIntangibles',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('otra_operacion', models.CharField(blank=True, max_length=255)),
('propietario_registrado', models.CharField(blank=True, max_length=255)),
('descripcion', models.CharField(blank=True, max_length=255)),
('otra_dependencia', models.CharField(blank=True, max_length=255)),
('num_registro', models.CharField(blank=True, max_length=255)),
('fecha_registro', models.DateField(blank=True, null=True)),
('otro_sector', models.CharField(blank=True, max_length=255)),
('precio_adquisicion', models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True)),
('otra_forma', models.CharField(blank=True, max_length=255)),
('fecha_vencimiento', models.DateField(blank=True, null=True)),
('precio_total_adquisicion', models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('otro_ente', models.CharField(blank=True, max_length=255, null=True)),
('activos_bienes', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.ActivosBienes')),
('cat_entes_publicos', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatEntesPublicos')),
('cat_formas_adquisiciones', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatFormasAdquisiciones')),
('cat_monedas', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatMonedas')),
('cat_sectores_industria', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatSectoresIndustria')),
('cat_tipos_operaciones', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposOperaciones')),
('declaraciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Declaraciones')),
('observaciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Observaciones')),
],
),
migrations.CreateModel(
name='BienesInmuebles',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('superficie_terreno', models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True)),
('superficie_construccion', models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True)),
('otro_titular', models.CharField(blank=True, max_length=255)),
('num_escritura_publica', models.CharField(blank=True, max_length=255)),
('num_registro_publico', models.CharField(blank=True, max_length=255)),
('folio_real', models.CharField(blank=True, max_length=255)),
('fecha_contrato_compra', models.DateField(blank=True, null=True)),
('otra_forma', models.CharField(blank=True, max_length=255)),
('fecha_adquisicion', models.DateField(blank=True, null=True)),
('precio_adquisicion', models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True)),
('valor_catastral', models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('otra_operacion', models.CharField(blank=True, max_length=255, null=True)),
('otro_inmueble', models.CharField(blank=True, max_length=255, null=True)),
('activos_bienes', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.ActivosBienes')),
('cat_formas_adquisiciones', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatFormasAdquisiciones')),
('cat_monedas', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatMonedas')),
('cat_tipos_inmuebles', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposInmuebles')),
('cat_tipos_operaciones', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposOperaciones')),
('cat_tipos_titulares', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposTitulares')),
('declaraciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Declaraciones')),
('domicilios', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Domicilios')),
('observaciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Observaciones')),
],
),
migrations.CreateModel(
name='BeneficiosGratuitos',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('otros_beneficios', models.CharField(blank=True, max_length=255)),
('origen_beneficio', models.CharField(blank=True, max_length=255)),
('otro_sector', models.CharField(blank=True, max_length=255)),
('valor_beneficio', models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('cat_sectores_industria', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatSectoresIndustria')),
('cat_tipos_beneficios', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposBeneficios')),
('declaraciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Declaraciones')),
('observaciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Observaciones')),
],
),
migrations.CreateModel(
name='BeneficiosEspecie',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tipo_bien_servicio', models.CharField(blank=True, max_length=255)),
('valor_mercado', models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True)),
('otro_familiar', models.CharField(blank=True, max_length=255)),
('otra_relacion', models.CharField(blank=True, max_length=255)),
('fecha_inicio', models.DateField(blank=True, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('cat_sectores_industria', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatSectoresIndustria')),
('cat_tipos_relaciones_personales', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposRelacionesPersonales')),
('declaraciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Declaraciones')),
('domicilios', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Domicilios')),
('info_personal_var', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.InfoPersonalVar')),
('observaciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Observaciones')),
],
),
migrations.CreateModel(
name='Apoyos',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nombre_programa', models.CharField(blank=True, max_length=255)),
('institucion_otorgante', models.CharField(blank=True, max_length=255)),
('otro_apoyo', models.CharField(blank=True, max_length=255)),
('valor_anual', models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('beneficiario_infopersonalvar', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.InfoPersonalVar')),
('cat_ordenes_gobierno', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatOrdenesGobierno')),
('cat_tipos_apoyos', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatTiposApoyos')),
('observaciones', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Observaciones')),
],
),
migrations.AddField(
model_name='activosbienes',
name='cat_activo_bien',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.CatActivoBien'),
),
migrations.AddField(
model_name='activosbienes',
name='declaraciones',
field=models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='declaracion.Declaraciones'),
),
]
| 72.588517
| 199
| 0.632292
| 9,430
| 91,026
| 5.910817
| 0.047084
| 0.061519
| 0.067816
| 0.081379
| 0.905237
| 0.900196
| 0.893073
| 0.870575
| 0.83272
| 0.817166
| 0
| 0.012215
| 0.226507
| 91,026
| 1,253
| 200
| 72.646449
| 0.779443
| 0.000472
| 0
| 0.724719
| 1
| 0
| 0.179323
| 0.083182
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.004013
| 0
| 0.007223
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
094c9db42885daf882fc54d0e8e0f1ed9e2f35a8
| 8,698
|
py
|
Python
|
tests/unit/confidant/routes/certificates_test.py
|
chadwhitacre/confidant
|
dd788147b355f760767cf3b9487671c67948ade3
|
[
"Apache-2.0"
] | 1,820
|
2015-11-04T17:57:16.000Z
|
2022-03-31T16:47:24.000Z
|
tests/unit/confidant/routes/certificates_test.py
|
chadwhitacre/confidant
|
dd788147b355f760767cf3b9487671c67948ade3
|
[
"Apache-2.0"
] | 186
|
2015-11-04T18:21:52.000Z
|
2022-01-14T20:31:31.000Z
|
tests/unit/confidant/routes/certificates_test.py
|
isabella232/confidant
|
3dac318c3e1f29bae5771084ad29a4bc121f1771
|
[
"Apache-2.0"
] | 136
|
2015-11-04T19:23:14.000Z
|
2022-02-25T01:51:29.000Z
|
import json
from confidant.app import create_app
from confidant.services import certificatemanager
def test_get_certificate(mocker):
    """Exercise GET /v1/certificates/<ca>/<cn> across auth/ACL outcomes.

    Covers: 403 for a non-user principal, 403 when the ACL module check
    fails, 200 with the issued certificate payload, and 429 (with a
    Retry-After header) when the CA is not ready yet.
    """
    endpoint = '/v1/certificates/development/test.example.com'
    app = create_app()
    mocker.patch('confidant.settings.USE_AUTH', False)
    mocker.patch('confidant.authnz.get_logged_in_user',
                 return_value='badservice')
    mocker.patch('confidant.routes.certificates.authnz.user_is_user_type',
                 return_value=True)
    mocker.patch('confidant.routes.certificates.authnz.user_is_service',
                 return_value=False)

    # A service-type principal must be rejected outright.
    response = app.test_client().get(endpoint, follow_redirects=False)
    assert response.status_code == 403

    mocker.patch('confidant.routes.certificates.authnz.user_is_user_type',
                 return_value=False)
    mocker.patch('confidant.routes.certificates.authnz.get_logged_in_user',
                 return_value='test@example.com')
    mocker.patch('confidant.routes.certificates.acl_module_check',
                 return_value=False)

    # A user that fails the ACL module check is also rejected.
    response = app.test_client().get(endpoint, follow_redirects=False)
    assert response.status_code == 403

    mocker.patch('confidant.routes.certificates.acl_module_check',
                 return_value=True)
    mocker.patch('confidant.authnz.get_logged_in_user', return_value='test')
    ca = certificatemanager.CertificateAuthority('development')
    mocker.patch('confidant.routes.certificates.certificatemanager.get_ca',
                 return_value=ca)
    ca.issue_certificate_with_key = mocker.Mock(
        return_value={
            'certificate': 'test_certificate',
            'certificate_chain': 'test_certificate_chain',
            'key': 'test_key',
        },
    )

    # Authorized request returns the issued material verbatim.
    response = app.test_client().get(endpoint, follow_redirects=False)
    assert response.status_code == 200
    assert json.loads(response.data) == {
        'certificate': 'test_certificate',
        'certificate_chain': 'test_certificate_chain',
        'key': 'test_key',
    }

    # When the CA is not ready, the route maps the error to 429 + backoff.
    ca.issue_certificate_with_key = mocker.Mock(
        side_effect=certificatemanager.CertificateNotReadyError(),
    )
    response = app.test_client().get(endpoint, follow_redirects=False)
    assert response.status_code == 429
    assert response.headers['Retry-After'] == '2'
def test_get_certificate_from_csr(mocker):
    """Exercise POST /v1/certificates/<ca> (issuance from a CSR).

    Covers: 400 for missing/invalid CSR payloads, 403 for a service
    principal and for an ACL failure, and 200 with the issued
    certificate payload on success.
    """
    ca = certificatemanager.CertificateAuthority('development')
    private_key = ca.generate_key()
    pem_csr = ca.encode_csr(ca.generate_csr(private_key, 'test.example.com'))
    app = create_app()
    mocker.patch('confidant.settings.USE_AUTH', False)

    def post(payload):
        # Helper: POST a JSON body to the issuance endpoint.
        return app.test_client().post(
            '/v1/certificates/development',
            data=json.dumps(payload),
            content_type='application/json',
            follow_redirects=False,
        )

    # Malformed requests: empty body, validity only, and a bogus CSR.
    assert post({}).status_code == 400
    assert post({'validity': 7}).status_code == 400
    assert post({'csr': 'invalid_csr'}).status_code == 400

    mocker.patch('confidant.routes.certificates.authnz.user_is_user_type',
                 return_value=True)
    mocker.patch('confidant.routes.certificates.authnz.user_is_service',
                 return_value=False)
    mocker.patch('confidant.routes.certificates.authnz.get_logged_in_user',
                 return_value='badservice')

    # A service-type principal must be rejected.
    assert post({'csr': pem_csr, 'validity': 7}).status_code == 403

    mocker.patch('confidant.routes.certificates.authnz.user_is_user_type',
                 return_value=False)
    mocker.patch('confidant.routes.certificates.authnz.get_logged_in_user',
                 return_value='test@example.com')
    mocker.patch('confidant.routes.certificates.acl_module_check',
                 return_value=False)

    # A user that fails the ACL module check is also rejected.
    assert post({'csr': pem_csr, 'validity': 7}).status_code == 403

    mocker.patch('confidant.routes.certificates.acl_module_check',
                 return_value=True)
    mocker.patch('confidant.routes.certificates.certificatemanager.get_ca',
                 return_value=ca)
    ca.issue_certificate = mocker.Mock(return_value='test-certificate-arn')
    ca.get_certificate_from_arn = mocker.Mock(
        return_value={
            'certificate': 'test_certificate',
            'certificate_chain': 'test_certificate_chain',
        },
    )

    # Authorized request returns the certificate fetched from the ARN.
    response = post({'csr': pem_csr, 'validity': 7})
    assert response.status_code == 200
    assert json.loads(response.data) == {
        'certificate': 'test_certificate',
        'certificate_chain': 'test_certificate_chain',
    }
def test_list_cas(mocker):
    """Exercise GET /v1/cas: 403 on ACL failure, 200 with the CA listing."""
    app = create_app()
    mocker.patch('confidant.settings.USE_AUTH', False)
    mocker.patch('confidant.routes.certificates.authnz.get_logged_in_user',
                 return_value='test@example.com')
    mocker.patch('confidant.routes.certificates.acl_module_check',
                 return_value=False)

    # Denied by the ACL module check.
    response = app.test_client().get('/v1/cas', follow_redirects=False)
    assert response.status_code == 403

    mocker.patch('confidant.routes.certificates.acl_module_check',
                 return_value=True)
    mocker.patch('confidant.authnz.get_logged_in_user', return_value='test')
    mocker.patch('confidant.routes.certificates.certificatemanager.list_cas',
                 return_value=[{
                     'ca': 'development',
                     'certificate': 'test_certificate',
                     'certificate_chain': 'test_certificate_chain',
                     'tags': {'environment': 'development'},
                 }])

    # Authorized request returns the listing wrapped under 'cas'.
    response = app.test_client().get('/v1/cas', follow_redirects=False)
    assert response.status_code == 200
    assert json.loads(response.data) == {
        'cas': [{
            'ca': 'development',
            'certificate': 'test_certificate',
            'certificate_chain': 'test_certificate_chain',
            'tags': {'environment': 'development'},
        }],
    }
def test_get_ca(mocker):
    """Exercise GET /v1/cas/<ca>: 403 on ACL failure, 200 with CA details."""
    app = create_app()
    mocker.patch('confidant.settings.USE_AUTH', False)
    mocker.patch(
        'confidant.routes.certificates.authnz.get_logged_in_user',
        return_value='test@example.com',
    )
    mocker.patch(
        'confidant.routes.certificates.acl_module_check',
        return_value=False,
    )
    # Bug fix: the ACL-denied request previously hit the *list* endpoint
    # ('/v1/cas', copy-pasted from test_list_cas), so the get_ca route's
    # 403 path was never exercised. Target the single-CA endpoint.
    ret = app.test_client().get(
        '/v1/cas/development',
        follow_redirects=False,
    )
    assert ret.status_code == 403
    mocker.patch(
        'confidant.routes.certificates.acl_module_check',
        return_value=True,
    )
    mocker.patch('confidant.authnz.get_logged_in_user', return_value='test')
    ca_object = certificatemanager.CertificateAuthority('development')
    mocker.patch(
        'confidant.routes.certificates.certificatemanager.get_ca',
        return_value=ca_object,
    )
    ca_object.get_certificate_authority_certificate = mocker.Mock(
        return_value={
            'ca': 'development',
            'certificate': 'test_certificate',
            'certificate_chain': 'test_certificate_chain',
            'tags': {'environment': 'development'},
        },
    )
    # Authorized request returns the CA's certificate details verbatim.
    ret = app.test_client().get('/v1/cas/development', follow_redirects=False)
    json_data = json.loads(ret.data)
    assert ret.status_code == 200
    assert json_data == {
        'ca': 'development',
        'certificate': 'test_certificate',
        'certificate_chain': 'test_certificate_chain',
        'tags': {'environment': 'development'},
    }
| 29.993103
| 78
| 0.634399
| 921
| 8,698
| 5.731813
| 0.090119
| 0.064596
| 0.117446
| 0.113279
| 0.915135
| 0.894298
| 0.894298
| 0.879144
| 0.866263
| 0.866263
| 0
| 0.009248
| 0.241665
| 8,698
| 289
| 79
| 30.096886
| 0.791086
| 0
| 0
| 0.710526
| 0
| 0
| 0.328581
| 0.224994
| 0
| 0
| 0
| 0
| 0.071429
| 1
| 0.015038
| false
| 0
| 0.011278
| 0
| 0.026316
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
11add8fa249fc071d577de895f07d82a7bf6c611
| 82
|
py
|
Python
|
main.py
|
donno2048/qruine
|
7ec170fb7984de70e4a6765ef2dbb6624d54cd41
|
[
"MIT"
] | null | null | null |
main.py
|
donno2048/qruine
|
7ec170fb7984de70e4a6765ef2dbb6624d54cd41
|
[
"MIT"
] | null | null | null |
main.py
|
donno2048/qruine
|
7ec170fb7984de70e4a6765ef2dbb6624d54cd41
|
[
"MIT"
] | null | null | null |
# Quine-style one-liner: the walrus-bound string `s` embeds its own source
# via the %r repr substitution, so the QR code written to main.png encodes
# the complete program text itself.
# NOTE(review): exec runs only this local literal — no untrusted input —
# but the pattern is deliberate obfuscation; keep it self-contained.
exec(s:='import qrcode;qrcode.make("exec(s:=%r)"%s).save(open("main.png", "wb"))')
| 82
| 82
| 0.621951
| 15
| 82
| 3.4
| 0.733333
| 0.196078
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02439
| 82
| 1
| 82
| 82
| 0.6375
| 0
| 0
| 0
| 0
| 1
| 0.855422
| 0.686747
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
11e9822f551972428279d54e90255955921bdc6d
| 163
|
py
|
Python
|
src/__init__.py
|
paulwarkentin/tf-ssd-vgg
|
f48e3ccbb8eb092d3cb82a9d90164c7328880477
|
[
"MIT"
] | 5
|
2021-09-26T07:19:42.000Z
|
2022-03-11T23:25:36.000Z
|
ssd/training/__init__.py
|
bharatmahaur/ComparativeStudy
|
2e3b6de882acc2a465e1b7c8bcd23cc9c8181d3d
|
[
"Apache-2.0"
] | null | null | null |
ssd/training/__init__.py
|
bharatmahaur/ComparativeStudy
|
2e3b6de882acc2a465e1b7c8bcd23cc9c8181d3d
|
[
"Apache-2.0"
] | null | null | null |
##
## /src/__init__.py
##
## Created by Paul Warkentin <paul@warkentin.email> on 22/06/2018.
## Updated by Paul Warkentin <paul@warkentin.email> on 22/06/2018.
##
| 23.285714
| 66
| 0.693252
| 25
| 163
| 4.36
| 0.52
| 0.477064
| 0.275229
| 0.348624
| 0.788991
| 0.788991
| 0.788991
| 0.788991
| 0.788991
| 0.788991
| 0
| 0.112676
| 0.128834
| 163
| 6
| 67
| 27.166667
| 0.65493
| 0.883436
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
ee966251920b7bf5fee84508474b0ce744609e6f
| 33,994
|
py
|
Python
|
FreeFaucet.io_Bot.py
|
snoowl26/Crypto-Faucet-Bot
|
736695ce318e8f86846824d7c705a535e42650ef
|
[
"MIT"
] | 7
|
2021-03-04T05:08:12.000Z
|
2022-03-25T09:04:13.000Z
|
FreeFaucet.io_Bot.py
|
snoowl26/Crypto-Faucet-Bot
|
736695ce318e8f86846824d7c705a535e42650ef
|
[
"MIT"
] | null | null | null |
FreeFaucet.io_Bot.py
|
snoowl26/Crypto-Faucet-Bot
|
736695ce318e8f86846824d7c705a535e42650ef
|
[
"MIT"
] | 10
|
2021-03-05T06:08:57.000Z
|
2022-03-28T13:11:24.000Z
|
from selenium import webdriver
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import ElementNotInteractableException
from selenium.common.exceptions import UnexpectedAlertPresentException
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions
from selenium.common.exceptions import TimeoutException
from io import BytesIO
import time
import keyboard
import sys
from random import randrange
import os
# --- WebDriver / credential configuration ---------------------------------
driver_path = "chromedriver.exe"
brave_path = "C:/Program Files/Google/Chrome/Application/chrome.exe"
dir_path = os.path.dirname(os.path.realpath(__file__))
credentials = "creds.txt"
timer = 0  # cumulative seconds spent in randomized waits

option = webdriver.ChromeOptions()
option.binary_location = brave_path
option.add_argument("--incognito")
#option.add_argument("--headless")

# Bug fix: readlines() keeps the trailing "\n" on every line, so the
# newline was previously typed into the username/password fields by
# send_keys() (pressing Enter mid-form). splitlines() yields the same
# number of lines but without the line terminators.
with open(credentials) as f:
    creds = f.read().splitlines()
time.sleep(1)

# Per-site bot-detection state: *_bot counts consecutive bot alerts seen
# for a site, *_skip counts rounds skipped while that site cools down.
bot_attempt = 0
dash_bot = 0
nem_bot = 0
ada_bot = 0
xrp_bot = 0
btc_bot = 0
steam_bot = 0
usdc_bot = 0
link_bot = 0
tron_bot = 0
bnc_bot = 0
neo_bot = 0
ltc_bot = 0
eth_bot = 0
dash_skip = 0
nem_skip = 0
ada_skip = 0
xrp_skip = 0
btc_skip = 0
steam_skip = 0
usdc_skip = 0
link_skip = 0
tron_skip = 0
bnc_skip = 0
neo_skip = 0
ltc_skip = 0
eth_skip = 0
def login():
    """Log in to the currently loaded faucet page.

    Reads the module-level ``browser`` (Selenium WebDriver) plus
    ``username`` and ``password`` (assigned from ``creds`` before each
    call). Dismisses an ad overlay if present, fills the login form via
    absolute XPaths, and clicks the login button.
    """
    try:
        # The coinzilla ad overlay can cover the form; close it first.
        print("Checking for ad overlay")
        ad_check = browser.find_element_by_id("fbf-mobile-close-coinzilla")
        ad_check.click()
        print("Ads closed")
    except NoSuchElementException:
        print("No Ads found")
    # NOTE(review): absolute XPaths appear to assume the shared page
    # layout of the free*-faucet family of sites — brittle if the
    # markup changes; verify against the live pages.
    dash_un_field = browser.find_element_by_xpath(
        "/html/body/main/section/section[1]/div/div/div[2]/div/div[1]/div[1]/input")
    dash_un_field.click()
    dash_un_field.send_keys(username)
    print("Entered username")
    time.sleep(1)
    dash_pw_field = browser.find_element_by_xpath(
        "/html/body/main/section/section[1]/div/div/div[2]/div/div[1]/div[2]/input")
    dash_pw_field.click()
    dash_pw_field.send_keys(password)
    print("Entered password")
    time.sleep(1)
    login_button = browser.find_element_by_xpath("/html/body/main/section/section[1]/div/div/div[2]/div/div[1]/button")
    login_button.click()
    print("Clicked Login Button")
browser = webdriver.Chrome(executable_path=driver_path, chrome_options=option)
browser.maximize_window()
print("Browser launched")
while True:
if dash_bot <= 2:
try:
print("Navigating to https://Freedash.io")
browser.get("https://freedash.io/free")
username = creds[9]
password = creds[10]
login()
very_human = randrange(60)
# time.sleep(very_human)
timer += very_human
for remaining in range(very_human, 0, -1):
sys.stdout.write("\r")
sys.stdout.write("Waiting {:2d} seconds.".format(remaining))
sys.stdout.flush()
time.sleep(1)
sys.stdout.write("\rChecking for Roll Button \n")
try:
roll_button = browser.find_element_by_xpath("/html/body/main/div/div/div/div/div/div[5]/button")
roll_button.click()
print("Clicked roll button")
except ElementNotInteractableException:
print("No roll button found")
time.sleep(10)
try:
WebDriverWait(browser, 4).until(expected_conditions.alert_is_present())
alert = browser.switch_to.alert
alert.accept()
print("Found bot alert, refreshing page")
browser.refresh()
bot_attempt += 1
dash_bot += 1
if bot_attempt > 10:
bot_attempt = 0
print("Bot check loop detected")
print("Seriously, who makes bot detection like this?")
browser.close()
browser = webdriver.Chrome(executable_path=driver_path, chrome_options=option)
browser.maximize_window()
except TimeoutException:
print("No alerts found")
except:
print("Error encountered on page")
browser.close()
browser = webdriver.Chrome(executable_path=driver_path, chrome_options=option)
browser.maximize_window()
else:
dash_skip += 1
print("https://Freedash.io skipped for bot cool down")
if dash_skip > 2:
dash_skip = 0
dash_bot = 0
print("Final cool down for https://Freedash.io")
####################################################################
if nem_bot <= 2:
try:
print("Navigating to https://Freenem.com")
browser.get("https://freenem.com/free")
username = creds[13]
password = creds[14]
login()
very_human = randrange(60)
# time.sleep(very_human)
timer += very_human
for remaining in range(very_human, 0, -1):
sys.stdout.write("\r")
sys.stdout.write("Waiting {:2d} seconds.".format(remaining))
sys.stdout.flush()
time.sleep(1)
sys.stdout.write("\rChecking for Roll Button \n")
try:
roll_button = browser.find_element_by_xpath("/html/body/main/div/div/div/div/div/div[5]/button")
roll_button.click()
print("Clicked roll button")
except ElementNotInteractableException:
print("No roll button found")
time.sleep(10)
try:
WebDriverWait(browser, 4).until(expected_conditions.alert_is_present())
alert = browser.switch_to.alert
alert.accept()
print("Found bot alert, refreshing page")
browser.refresh()
bot_attempt += 1
nem_bot += 1
if bot_attempt > 10:
bot_attempt = 0
print("Bot check loop detected")
print("Seriously, who makes bot detection like this?")
browser.close()
browser = webdriver.Chrome(executable_path=driver_path, chrome_options=option)
browser.maximize_window()
except TimeoutException:
print("No alerts found")
except:
print("Error encountered on page")
browser.close()
browser = webdriver.Chrome(executable_path=driver_path, chrome_options=option)
browser.maximize_window()
else:
nem_skip += 1
print("https://Freenem.com skipped for bot cool down")
if nem_skip > 2:
nem_skip = 0
nem_bot = 0
print("Final cool down for https://Freenem.com")
####################################################################
if ada_bot <= 2:
try:
print("Navigating to https://Freecardano.com")
browser.get("https://freecardano.com/free")
username = creds[17]
password = creds[18]
login()
very_human = randrange(60)
# time.sleep(very_human)
timer += very_human
for remaining in range(very_human, 0, -1):
sys.stdout.write("\r")
sys.stdout.write("Waiting {:2d} seconds.".format(remaining))
sys.stdout.flush()
time.sleep(1)
sys.stdout.write("\rChecking for Roll Button \n")
try:
roll_button = browser.find_element_by_xpath("/html/body/main/div/div/div/div/div/div[5]/button")
roll_button.click()
print("Clicked roll button")
except ElementNotInteractableException:
print("No roll button found")
time.sleep(10)
try:
WebDriverWait(browser, 4).until(expected_conditions.alert_is_present())
alert = browser.switch_to.alert
alert.accept()
print("Found bot alert, refreshing page")
browser.refresh()
bot_attempt += 1
ada_bot += 1
if bot_attempt > 10:
bot_attempt = 0
print("Bot check loop detected")
print("Seriously, who makes bot detection like this?")
browser.close()
browser = webdriver.Chrome(executable_path=driver_path, chrome_options=option)
browser.maximize_window()
except TimeoutException:
print("No alerts found")
except:
print("Error encountered on page")
browser.close()
browser = webdriver.Chrome(executable_path=driver_path, chrome_options=option)
browser.maximize_window()
else:
ada_skip += 1
print("https://Freecardano.com skipped for bot cool down")
if ada_skip > 2:
ada_skip = 0
ada_bot = 0
print("Final cool down for https://Freecardano.com")
####################################################################
if xrp_bot <=2:
try:
print("Navigating to https://coinfaucet.io")
browser.get("https://coinfaucet.io/free")
username = creds[21]
password = creds[22]
login()
very_human = randrange(60)
# time.sleep(very_human)
timer += very_human
for remaining in range(very_human, 0, -1):
sys.stdout.write("\r")
sys.stdout.write("Waiting {:2d} seconds.".format(remaining))
sys.stdout.flush()
time.sleep(1)
sys.stdout.write("\rChecking for Roll Button \n")
try:
roll_button = browser.find_element_by_xpath("/html/body/main/div/div/div/div/div/div[5]/button")
roll_button.click()
print("Clicked roll button")
except ElementNotInteractableException:
print("No roll button found")
time.sleep(10)
try:
WebDriverWait(browser, 4).until(expected_conditions.alert_is_present())
alert = browser.switch_to.alert
alert.accept()
print("Found bot alert, refreshing page")
browser.refresh()
bot_attempt += 1
xrp_bot += 1
if bot_attempt > 10:
bot_attempt = 0
print("Bot check loop detected")
print("Seriously, who makes bot detection like this?")
browser.close()
browser = webdriver.Chrome(executable_path=driver_path, chrome_options=option)
browser.maximize_window()
except TimeoutException:
print("No alerts found")
except:
print("Error encountered on page")
browser.close()
browser = webdriver.Chrome(executable_path=driver_path, chrome_options=option)
browser.maximize_window()
else:
xrp_skip += 1
print("https://coinfaucet.io skipped for bot cool down")
if xrp_skip > 2:
xrp_skip = 0
xrp_bot = 0
print("Final cool down for https://coinfaucet.io")
####################################################################
if btc_bot <= 2:
try:
print("Navigating to https://Freebitcoin.io")
browser.get("https://freebitcoin.io/free")
username = creds[25]
password = creds[26]
login()
very_human = randrange(60)
# time.sleep(very_human)
timer += very_human
for remaining in range(very_human, 0, -1):
sys.stdout.write("\r")
sys.stdout.write("Waiting {:2d} seconds.".format(remaining))
sys.stdout.flush()
time.sleep(1)
sys.stdout.write("\rChecking for Roll Button \n")
try:
roll_button = browser.find_element_by_xpath("/html/body/main/div/div/div/div/div/div[5]/button")
roll_button.click()
print("Clicked roll button")
except ElementNotInteractableException:
print("No roll button found")
time.sleep(10)
try:
WebDriverWait(browser, 4).until(expected_conditions.alert_is_present())
alert = browser.switch_to.alert
alert.accept()
print("Found bot alert, refreshing page")
browser.refresh()
bot_attempt += 1
btc_bot += 1
if bot_attempt > 10:
bot_attempt = 0
print("Bot check loop detected")
print("Seriously, who makes bot detection like this?")
browser.close()
browser = webdriver.Chrome(executable_path=driver_path, chrome_options=option)
browser.maximize_window()
except TimeoutException:
print("No alerts found")
except:
print("Error encountered on page")
browser.close()
browser = webdriver.Chrome(executable_path=driver_path, chrome_options=option)
browser.maximize_window()
else:
btc_skip += 1
print("https://Freebitcoin.io skipped for bot cooldown")
if btc_skip > 2:
btc_skip = 0
btc_bot = 0
print("Final cooldown for https://Freebitcoin.io")
####################################################################
# ---- https://freesteam.io faucet -----------------------------------------
# Skip this site while its bot-detection counter is in cooldown (> 2 hits).
if steam_bot <= 2:
    try:
        print("Navigating to https://Freesteam.io")
        browser.get("https://freesteam.io/free")
        # Site-specific credential slots in the shared creds list
        # (presumably loaded elsewhere -- verify against the loader).
        username = creds[29]
        password = creds[30]
        login()
        # Random 0-59 s pause for human-ish pacing; accumulated into `timer`
        # so the end-of-cycle countdown can subtract time already spent.
        very_human = randrange(60)
        # time.sleep(very_human)
        timer += very_human
        for remaining in range(very_human, 0, -1):
            sys.stdout.write("\r")
            sys.stdout.write("Waiting {:2d} seconds.".format(remaining))
            sys.stdout.flush()
            time.sleep(1)
        sys.stdout.write("\rChecking for Roll Button \n")
        try:
            roll_button = browser.find_element_by_xpath("/html/body/main/div/div/div/div/div/div[5]/button")
            roll_button.click()
            print("Clicked roll button")
        except ElementNotInteractableException:
            print("No roll button found")
        time.sleep(10)
        try:
            # A JS alert after rolling means the site flagged us as a bot.
            WebDriverWait(browser, 4).until(expected_conditions.alert_is_present())
            alert = browser.switch_to.alert
            alert.accept()
            print("Found bot alert, refreshing page")
            browser.refresh()
            bot_attempt += 1
            steam_bot += 1
            if bot_attempt > 10:
                # Too many consecutive bot checks: restart the browser session.
                bot_attempt = 0
                print("Bot check loop detected")
                print("Seriously, who makes bot detection like this?")
                browser.close()
                browser = webdriver.Chrome(executable_path=driver_path, chrome_options=option)
                browser.maximize_window()
        except TimeoutException:
            print("No alerts found")
    # Was a bare `except:`, which also swallowed KeyboardInterrupt/SystemExit;
    # narrowed to Exception so the script can still be stopped with Ctrl-C.
    except Exception:
        print("Error encountered on page")
        browser.close()
        browser = webdriver.Chrome(executable_path=driver_path, chrome_options=option)
        browser.maximize_window()
else:
    steam_skip += 1
    print("https://Freesteam.io skipped for bot cooldown")
    if steam_skip > 2:
        # After sitting out three cycles, clear both counters and try again.
        steam_skip = 0
        steam_bot = 0
        print("Final cooldown for https://Freesteam.io")
####################################################################
# ---- https://freeusdcoin.com faucet --------------------------------------
# Skip this site while its bot-detection counter is in cooldown (> 2 hits).
if usdc_bot <= 2:
    try:
        print("Navigating to https://freeusdcoin.com/")
        browser.get("https://freeusdcoin.com/free")
        # Site-specific credential slots in the shared creds list.
        username = creds[33]
        password = creds[34]
        login()
        # Random 0-59 s pause for human-ish pacing; tracked in `timer`.
        very_human = randrange(60)
        # time.sleep(very_human)
        timer += very_human
        for remaining in range(very_human, 0, -1):
            sys.stdout.write("\r")
            sys.stdout.write("Waiting {:2d} seconds.".format(remaining))
            sys.stdout.flush()
            time.sleep(1)
        sys.stdout.write("\rChecking for Roll Button \n")
        try:
            roll_button = browser.find_element_by_xpath("/html/body/main/div/div/div/div/div/div[5]/button")
            roll_button.click()
            print("Clicked roll button")
        except ElementNotInteractableException:
            print("No roll button found")
        time.sleep(10)
        try:
            # A JS alert after rolling means the site flagged us as a bot.
            WebDriverWait(browser, 4).until(expected_conditions.alert_is_present())
            alert = browser.switch_to.alert
            alert.accept()
            print("Found bot alert, refreshing page")
            browser.refresh()
            bot_attempt += 1
            usdc_bot += 1
            if bot_attempt > 10:
                # Too many consecutive bot checks: restart the browser session.
                bot_attempt = 0
                print("Bot check loop detected")
                print("Seriously, who makes bot detection like this?")
                browser.close()
                browser = webdriver.Chrome(executable_path=driver_path, chrome_options=option)
                browser.maximize_window()
        except TimeoutException:
            print("No alerts found")
    # Was a bare `except:`; narrowed to Exception so Ctrl-C still works.
    except Exception:
        print("Error encountered on page")
        browser.close()
        browser = webdriver.Chrome(executable_path=driver_path, chrome_options=option)
        browser.maximize_window()
else:
    usdc_skip += 1
    print("https://freeusdcoin.com/ skipped for bot cooldown")
    if usdc_skip > 2:
        # After sitting out three cycles, clear both counters and try again.
        usdc_skip = 0
        usdc_bot = 0
        print("Final cooldown for https://freeusdcoin.com/")
####################################################################
# ---- https://freechainlink.io faucet -------------------------------------
# Skip this site while its bot-detection counter is in cooldown (> 2 hits).
if link_bot <= 2:
    try:
        print("Navigating to https://Freechainlink.io")
        browser.get("https://freechainlink.io/free")
        # Site-specific credential slots in the shared creds list.
        username = creds[37]
        password = creds[38]
        login()
        # Random 0-59 s pause for human-ish pacing; tracked in `timer`.
        very_human = randrange(60)
        # time.sleep(very_human)
        timer += very_human
        for remaining in range(very_human, 0, -1):
            sys.stdout.write("\r")
            sys.stdout.write("Waiting {:2d} seconds.".format(remaining))
            sys.stdout.flush()
            time.sleep(1)
        sys.stdout.write("\rChecking for Roll Button \n")
        try:
            roll_button = browser.find_element_by_xpath("/html/body/main/div/div/div/div/div/div[5]/button")
            roll_button.click()
            print("Clicked roll button")
        except ElementNotInteractableException:
            print("No roll button found")
        time.sleep(10)
        try:
            # A JS alert after rolling means the site flagged us as a bot.
            WebDriverWait(browser, 4).until(expected_conditions.alert_is_present())
            alert = browser.switch_to.alert
            alert.accept()
            print("Found bot alert, refreshing page")
            browser.refresh()
            bot_attempt += 1
            link_bot += 1
            if bot_attempt > 10:
                # Too many consecutive bot checks: restart the browser session.
                bot_attempt = 0
                print("Bot check loop detected")
                print("Seriously, who makes bot detection like this?")
                browser.close()
                browser = webdriver.Chrome(executable_path=driver_path, chrome_options=option)
                browser.maximize_window()
        except TimeoutException:
            print("No alerts found")
    # Was a bare `except:`; narrowed to Exception so Ctrl-C still works.
    except Exception:
        print("Error encountered on page")
        browser.close()
        browser = webdriver.Chrome(executable_path=driver_path, chrome_options=option)
        browser.maximize_window()
else:
    link_skip += 1
    print("https://Freechainlink.io skipped for bot cooldown")
    if link_skip > 2:
        # After sitting out three cycles, clear both counters and try again.
        link_skip = 0
        link_bot = 0
        print("Final cooldown for https://Freechainlink.io")
####################################################################
# ---- https://free-tron.com faucet ----------------------------------------
# Skip this site while its bot-detection counter is in cooldown (> 2 hits).
if tron_bot <= 2:
    try:
        print("Navigating to https://Free-tron.com")
        browser.get("https://free-tron.com/free")
        # Site-specific credential slots in the shared creds list.
        username = creds[41]
        password = creds[42]
        login()
        # Random 0-59 s pause for human-ish pacing; tracked in `timer`.
        very_human = randrange(60)
        # time.sleep(very_human)
        timer += very_human
        for remaining in range(very_human, 0, -1):
            sys.stdout.write("\r")
            sys.stdout.write("Waiting {:2d} seconds.".format(remaining))
            sys.stdout.flush()
            time.sleep(1)
        sys.stdout.write("\rChecking for Roll Button \n")
        try:
            roll_button = browser.find_element_by_xpath("/html/body/main/div/div/div/div/div/div[5]/button")
            roll_button.click()
            print("Clicked roll button")
        except ElementNotInteractableException:
            print("No roll button found")
        time.sleep(10)
        try:
            # A JS alert after rolling means the site flagged us as a bot.
            WebDriverWait(browser, 4).until(expected_conditions.alert_is_present())
            alert = browser.switch_to.alert
            alert.accept()
            print("Found bot alert, refreshing page")
            browser.refresh()
            bot_attempt += 1
            tron_bot += 1
            if bot_attempt > 10:
                # Too many consecutive bot checks: restart the browser session.
                bot_attempt = 0
                print("Bot check loop detected")
                print("Seriously, who makes bot detection like this?")
                browser.close()
                browser = webdriver.Chrome(executable_path=driver_path, chrome_options=option)
                browser.maximize_window()
        except TimeoutException:
            print("No alerts found")
    # Was a bare `except:`; narrowed to Exception so Ctrl-C still works.
    except Exception:
        print("Error encountered on page")
        browser.close()
        browser = webdriver.Chrome(executable_path=driver_path, chrome_options=option)
        browser.maximize_window()
else:
    tron_skip += 1
    print("https://Free-tron.com skipped for bot cooldown")
    if tron_skip > 2:
        # After sitting out three cycles, clear both counters and try again.
        tron_skip = 0
        tron_bot = 0
        print("Final cooldown for https://Free-tron.com")
####################################################################
# ---- https://freebinancecoin.com faucet ----------------------------------
# Skip this site while its bot-detection counter is in cooldown (> 2 hits).
#
# BUG FIX: the original gated on `bnc_bot` but incremented/reset `bnb_bot`
# and `bnb_skip`, so the bot counter the gate reads was never updated and
# the cooldown could never trigger. Unified on the `bnc_*` prefix to match
# the gate and the prefix-consistent scheme of every other site section.
if bnc_bot <= 2:
    try:
        print("Navigating to https://Freebinancecoin.com")
        browser.get("https://freebinancecoin.com/free")
        # Site-specific credential slots in the shared creds list.
        username = creds[45]
        password = creds[46]
        login()
        # Random 0-59 s pause for human-ish pacing; tracked in `timer`.
        very_human = randrange(60)
        # time.sleep(very_human)
        timer += very_human
        for remaining in range(very_human, 0, -1):
            sys.stdout.write("\r")
            sys.stdout.write("Waiting {:2d} seconds.".format(remaining))
            sys.stdout.flush()
            time.sleep(1)
        sys.stdout.write("\rChecking for Roll Button \n")
        try:
            roll_button = browser.find_element_by_xpath("/html/body/main/div/div/div/div/div/div[5]/button")
            roll_button.click()
            print("Clicked roll button")
        except ElementNotInteractableException:
            print("No roll button found")
        time.sleep(10)
        try:
            # A JS alert after rolling means the site flagged us as a bot.
            WebDriverWait(browser, 4).until(expected_conditions.alert_is_present())
            alert = browser.switch_to.alert
            alert.accept()
            print("Found bot alert, refreshing page")
            browser.refresh()
            bot_attempt += 1
            bnc_bot += 1
            if bot_attempt > 10:
                # Too many consecutive bot checks: restart the browser session.
                bot_attempt = 0
                print("Bot check loop detected")
                print("Seriously, who makes bot detection like this?")
                browser.close()
                browser = webdriver.Chrome(executable_path=driver_path, chrome_options=option)
                browser.maximize_window()
        except TimeoutException:
            print("No alerts found")
    # Was a bare `except:`; narrowed to Exception so Ctrl-C still works.
    except Exception:
        print("Error encountered on page")
        browser.close()
        browser = webdriver.Chrome(executable_path=driver_path, chrome_options=option)
        browser.maximize_window()
else:
    bnc_skip += 1
    print("https://Freebinancecoin.com skipped for bot cooldown")
    if bnc_skip > 2:
        # After sitting out three cycles, clear both counters and try again.
        bnc_skip = 0
        bnc_bot = 0
        print("Final cooldown for https://Freebinancecoin.com")
####################################################################
# ---- https://freeneo.io faucet -------------------------------------------
# Skip this site while its bot-detection counter is in cooldown (> 2 hits).
if neo_bot <= 2:
    try:
        print("Navigating to https://Freeneo.io")
        browser.get("https://freeneo.io/free")
        # Site-specific credential slots in the shared creds list.
        username = creds[49]
        password = creds[50]
        login()
        # Random 0-59 s pause for human-ish pacing; tracked in `timer`.
        very_human = randrange(60)
        # time.sleep(very_human)
        timer += very_human
        for remaining in range(very_human, 0, -1):
            sys.stdout.write("\r")
            sys.stdout.write("Waiting {:2d} seconds.".format(remaining))
            sys.stdout.flush()
            time.sleep(1)
        sys.stdout.write("\rChecking for Roll Button \n")
        try:
            roll_button = browser.find_element_by_xpath("/html/body/main/div/div/div/div/div/div[5]/button")
            roll_button.click()
            print("Clicked roll button")
        except ElementNotInteractableException:
            print("No roll button found")
        time.sleep(10)
        try:
            # A JS alert after rolling means the site flagged us as a bot.
            WebDriverWait(browser, 4).until(expected_conditions.alert_is_present())
            alert = browser.switch_to.alert
            alert.accept()
            print("Found bot alert, refreshing page")
            browser.refresh()
            bot_attempt += 1
            neo_bot += 1
            if bot_attempt > 10:
                # Too many consecutive bot checks: restart the browser session.
                bot_attempt = 0
                print("Bot check loop detected")
                print("Seriously, who makes bot detection like this?")
                browser.close()
                browser = webdriver.Chrome(executable_path=driver_path, chrome_options=option)
                browser.maximize_window()
        except TimeoutException:
            print("No alerts found")
    # Was a bare `except:`; narrowed to Exception so Ctrl-C still works.
    except Exception:
        print("Error encountered on page")
        browser.close()
        browser = webdriver.Chrome(executable_path=driver_path, chrome_options=option)
        browser.maximize_window()
else:
    neo_skip += 1
    print("https://Freeneo.io skipped for bot cooldown")
    if neo_skip > 2:
        # After sitting out three cycles, clear both counters and try again.
        neo_skip = 0
        neo_bot = 0
        print("Final cooldown for https://Freeneo.io")
####################################################################
# ---- https://free-ltc.com faucet -----------------------------------------
# Skip this site while its bot-detection counter is in cooldown (> 2 hits).
if ltc_bot <= 2:
    try:
        print("Navigating to https://Free-ltc.com")
        browser.get("https://free-ltc.com/free")
        # Site-specific credential slots in the shared creds list.
        username = creds[53]
        password = creds[54]
        login()
        # Random 0-59 s pause for human-ish pacing; tracked in `timer`.
        very_human = randrange(60)
        # time.sleep(very_human)
        timer += very_human
        for remaining in range(very_human, 0, -1):
            sys.stdout.write("\r")
            sys.stdout.write("Waiting {:2d} seconds.".format(remaining))
            sys.stdout.flush()
            time.sleep(1)
        sys.stdout.write("\rChecking for Roll Button \n")
        try:
            roll_button = browser.find_element_by_xpath("/html/body/main/div/div/div/div/div/div[5]/button")
            roll_button.click()
            print("Clicked roll button")
        except ElementNotInteractableException:
            print("No roll button found")
        time.sleep(10)
        try:
            # A JS alert after rolling means the site flagged us as a bot.
            WebDriverWait(browser, 4).until(expected_conditions.alert_is_present())
            alert = browser.switch_to.alert
            alert.accept()
            print("Found bot alert, refreshing page")
            browser.refresh()
            bot_attempt += 1
            ltc_bot += 1
            if bot_attempt > 10:
                # Too many consecutive bot checks: restart the browser session.
                bot_attempt = 0
                print("Bot check loop detected")
                print("Seriously, who makes bot detection like this?")
                browser.close()
                browser = webdriver.Chrome(executable_path=driver_path, chrome_options=option)
                browser.maximize_window()
        except TimeoutException:
            print("No alerts found")
    # Was a bare `except:`; narrowed to Exception so Ctrl-C still works.
    except Exception:
        print("Error encountered on page")
        browser.close()
        browser = webdriver.Chrome(executable_path=driver_path, chrome_options=option)
        browser.maximize_window()
else:
    ltc_skip += 1
    print("https://Free-ltc.com skipped for bot cooldown")
    if ltc_skip > 2:
        # After sitting out three cycles, clear both counters and try again.
        ltc_skip = 0
        ltc_bot = 0
        print("Final cooldown for https://Free-ltc.com")
####################################################################
# ---- https://freeethereum.com faucet -------------------------------------
# Skip this site while its bot-detection counter is in cooldown (> 2 hits).
if eth_bot <= 2:
    try:
        print("Navigating to https://freeethereum.com/")
        browser.get("https://freeethereum.com/free")
        # Site-specific credential slots in the shared creds list.
        username = creds[57]
        password = creds[58]
        login()
        # Random 0-59 s pause for human-ish pacing; tracked in `timer`.
        very_human = randrange(60)
        # time.sleep(very_human)
        timer += very_human
        for remaining in range(very_human, 0, -1):
            sys.stdout.write("\r")
            sys.stdout.write("Waiting {:2d} seconds.".format(remaining))
            sys.stdout.flush()
            time.sleep(1)
        sys.stdout.write("\rChecking for Roll Button \n")
        try:
            roll_button = browser.find_element_by_xpath("/html/body/main/div/div/div/div/div/div[5]/button")
            roll_button.click()
            print("Clicked roll button")
        except ElementNotInteractableException:
            print("No roll button found")
        time.sleep(10)
        try:
            # A JS alert after rolling means the site flagged us as a bot.
            WebDriverWait(browser, 4).until(expected_conditions.alert_is_present())
            alert = browser.switch_to.alert
            alert.accept()
            print("Found bot alert, refreshing page")
            browser.refresh()
            bot_attempt += 1
            eth_bot += 1
            if bot_attempt > 10:
                # Too many consecutive bot checks: restart the browser session.
                bot_attempt = 0
                print("Bot check loop detected")
                print("Seriously, who makes bot detection like this?")
                browser.close()
                browser = webdriver.Chrome(executable_path=driver_path, chrome_options=option)
                browser.maximize_window()
        except TimeoutException:
            print("No alerts found")
    # Was a bare `except:`; narrowed to Exception so Ctrl-C still works.
    except Exception:
        print("Error encountered on page")
        browser.close()
        browser = webdriver.Chrome(executable_path=driver_path, chrome_options=option)
        browser.maximize_window()
else:
    eth_skip += 1
    print("https://freeethereum.com/ skipped for bot cooldown")
    if eth_skip > 2:
        # After sitting out three cycles, clear both counters and try again.
        eth_skip = 0
        eth_bot = 0
        print("Final cooldown for https://freeethereum.com/")
####################################################################
# All faucet sections done: close the browser, wait out the remainder of the
# roughly hourly cycle, then start a fresh browser for the next pass.
browser.close()
print("All sites collected")
# Clamp at zero so a cycle that already took more than 3500 s does not print
# a negative countdown (range() over a negative bound is simply empty, but
# the displayed value was wrong).
total_timer = max(0, 3500 - timer)
print("Waiting for countdown: " + str(total_timer))
for remaining in range(total_timer, 0, -1):
    sys.stdout.write("\r")
    sys.stdout.write("{:2d} seconds remaining.".format(remaining))
    sys.stdout.flush()
    time.sleep(1)
sys.stdout.write("\rComplete! \n")
timer = 0
browser = webdriver.Chrome(executable_path=driver_path, chrome_options=option)
browser.maximize_window()
| 37.897436
| 120
| 0.512355
| 3,396
| 33,994
| 4.989988
| 0.066254
| 0.026201
| 0.02921
| 0.027617
| 0.807683
| 0.794819
| 0.781246
| 0.74401
| 0.729022
| 0.726838
| 0
| 0.016897
| 0.369771
| 33,994
| 896
| 121
| 37.939732
| 0.774085
| 0.009766
| 0
| 0.753968
| 0
| 0.021164
| 0.197295
| 0.028808
| 0
| 0
| 0
| 0
| 0
| 1
| 0.001323
| false
| 0.019841
| 0.019841
| 0
| 0.021164
| 0.183862
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ee98d613512a9bfbd81eafea10bc7efbfd5e750e
| 39,917
|
py
|
Python
|
tests/test_systems.py
|
jbradberry/universe
|
109261f75c49435d118cf9c5238124466a03d2d3
|
[
"MIT"
] | null | null | null |
tests/test_systems.py
|
jbradberry/universe
|
109261f75c49435d118cf9c5238124466a03d2d3
|
[
"MIT"
] | 24
|
2020-04-14T20:32:21.000Z
|
2021-08-03T13:29:45.000Z
|
tests/test_systems.py
|
jbradberry/universe
|
109261f75c49435d118cf9c5238124466a03d2d3
|
[
"MIT"
] | 3
|
2016-10-28T19:20:30.000Z
|
2021-02-28T03:19:49.000Z
|
import unittest
from universe import engine, systems
class UpdateTestCase(unittest.TestCase):
    """Tests for systems.UpdateSystem: create / reorder / update / delete
    actions on movement orders, including rejection of updates issued by a
    non-existent species, for a non-existent entity, or by the wrong owner.

    The 16-field species dict and the movement-order dicts were duplicated
    in every test; they are factored into `_species` / `_order` helpers
    that return a fresh dict per call so state mutated by one test cannot
    leak into another.
    """

    @staticmethod
    def _species(pk=0, name='Human', plural_name='Humans'):
        """Return a fresh species entity with the standard test attributes."""
        return {
            'pk': pk,
            'type': 'species',
            'name': name,
            'plural_name': plural_name,
            'growth_rate': 15,
            'gravity_immune': True,
            'temperature_immune': True,
            'radiation_immune': True,
            'population_per_r': 1000,
            'factories_produce_r': 10,
            'factories_cost_r': 10,
            'factories_per_pop': 10,
            'factories_cost_less': False,
            'minerals_per_m': 10,
            'mines_cost_r': 5,
            'mines_per_pop': 10,
        }

    @staticmethod
    def _order(pk, actor_id, seq, x_t=637, y_t=786, warp=8):
        """Return a fresh movement_order entity dict."""
        return {'pk': pk, 'type': 'movement_order', 'actor_id': actor_id,
                'seq': seq, 'x_t': x_t, 'y_t': y_t, 'warp': warp}

    @staticmethod
    def _run(state, updates):
        """Build a GameState, run the update system, return the orders."""
        S = engine.GameState(state, updates)
        systems.UpdateSystem().process(S.manager)
        return S.manager.get_entities('orders')

    def test_create(self):
        state = {
            'turn': 2500, 'width': 1000, 'seq': 2,
            'entities': [
                self._species(),
                {'pk': 1, 'type': 'ship', 'x': 480, 'y': 235, 'owner_id': 0},
            ]
        }
        updates = {
            0: [
                {'action': 'create', 'type': 'movement_order', 'actor_id': 1, 'seq': 0,
                 'x_t': 637, 'y_t': 786, 'warp': 8}
            ],
        }
        orders = self._run(state, updates)
        self.assertEqual(len(orders), 1)
        # The new order receives the next pk from the state's 'seq' counter.
        self.assertEqual(orders[2].serialize(), self._order(2, 1, 0))

    def test_create_with_existing(self):
        state = {
            'turn': 2500, 'width': 1000, 'seq': 3,
            'entities': [
                self._species(),
                {'pk': 1, 'type': 'ship', 'x': 480, 'y': 235, 'owner_id': 0},
                self._order(2, 1, 0),
            ]
        }
        updates = {
            0: [
                {'action': 'create', 'type': 'movement_order', 'actor_id': 1, 'seq': 1,
                 'x_t': 422, 'y_t': 210, 'warp': 10}
            ],
        }
        orders = self._run(state, updates)
        self.assertEqual(len(orders), 2)
        self.assertEqual(orders[3].serialize(),
                         self._order(3, 1, 1, x_t=422, y_t=210, warp=10))

    def test_create_with_conflict(self):
        state = {
            'turn': 2500, 'width': 1000, 'seq': 3,
            'entities': [
                self._species(),
                {'pk': 1, 'type': 'ship', 'x': 480, 'y': 235, 'owner_id': 0},
                self._order(2, 1, 0),
            ]
        }
        updates = {
            0: [
                {'action': 'create', 'type': 'movement_order', 'actor_id': 1, 'seq': 0,
                 'x_t': 422, 'y_t': 210, 'warp': 10}
            ],
        }
        orders = self._run(state, updates)
        # The create conflicts on seq 0 and is rejected; the original survives.
        self.assertEqual(len(orders), 1)
        self.assertEqual(orders[2].serialize(), self._order(2, 1, 0))

    def test_reorder(self):
        state = {
            'turn': 2500, 'width': 1000, 'seq': 4,
            'entities': [
                self._species(),
                {'pk': 1, 'type': 'ship', 'x': 480, 'y': 235, 'owner_id': 0},
                self._order(2, 1, 0),
                self._order(3, 1, 1, x_t=422, y_t=210, warp=10),
            ]
        }
        updates = {
            0: [
                {'action': 'reorder', 'actor_id': 1, 'seq1': 0, 'seq2': 1}
            ],
        }
        orders = self._run(state, updates)
        self.assertEqual(len(orders), 2)
        # The two orders swap seq values but keep their pks and targets.
        self.assertEqual(orders[2].serialize(), self._order(2, 1, 1))
        self.assertEqual(orders[3].serialize(),
                         self._order(3, 1, 0, x_t=422, y_t=210, warp=10))

    def test_reorder_does_not_exist(self):
        state = {
            'turn': 2500, 'width': 1000, 'seq': 4,
            'entities': [
                self._species(),
                {'pk': 1, 'type': 'ship', 'x': 480, 'y': 235, 'owner_id': 0},
                self._order(2, 1, 0),
                self._order(3, 1, 1, x_t=422, y_t=210, warp=10),
            ]
        }
        updates = {
            0: [
                {'action': 'reorder', 'actor_id': 1, 'seq1': 0, 'seq2': 2}
            ],
        }
        orders = self._run(state, updates)
        # seq2 names a non-existent slot: nothing changes.
        self.assertEqual(len(orders), 2)
        self.assertEqual(orders[2].serialize(), self._order(2, 1, 0))
        self.assertEqual(orders[3].serialize(),
                         self._order(3, 1, 1, x_t=422, y_t=210, warp=10))

    def test_update(self):
        state = {
            'turn': 2500, 'width': 1000, 'seq': 3,
            'entities': [
                self._species(),
                {'pk': 1, 'type': 'ship', 'x': 480, 'y': 235, 'owner_id': 0},
                self._order(2, 1, 0),
            ]
        }
        updates = {
            0: [
                {'action': 'update', 'type': 'movement_order', 'actor_id': 1, 'seq': 0,
                 'x_t': 422, 'y_t': 210, 'warp': 10}
            ]
        }
        orders = self._run(state, updates)
        self.assertEqual(len(orders), 1)
        # The existing order's target fields are overwritten in place.
        self.assertEqual(orders[2].serialize(),
                         self._order(2, 1, 0, x_t=422, y_t=210, warp=10))

    def test_update_does_not_exist(self):
        state = {
            'turn': 2500, 'width': 1000, 'seq': 3,
            'entities': [
                self._species(),
                {'pk': 1, 'type': 'ship', 'x': 480, 'y': 235, 'owner_id': 0},
                self._order(2, 1, 0),
            ]
        }
        updates = {
            0: [
                {'action': 'update', 'type': 'movement_order', 'actor_id': 1, 'seq': 1,
                 'x_t': 422, 'y_t': 210, 'warp': 10}
            ]
        }
        orders = self._run(state, updates)
        # No order has seq 1: the update is dropped and nothing changes.
        self.assertEqual(len(orders), 1)
        self.assertEqual(orders[2].serialize(), self._order(2, 1, 0))

    def test_delete(self):
        state = {
            'turn': 2500, 'width': 1000, 'seq': 3,
            'entities': [
                self._species(),
                {'pk': 1, 'type': 'ship', 'x': 480, 'y': 235, 'owner_id': 0},
                self._order(2, 1, 0),
            ]
        }
        updates = {
            0: [
                {'action': 'delete', 'actor_id': 1, 'seq': 0}
            ]
        }
        orders = self._run(state, updates)
        self.assertEqual(len(orders), 0)

    def test_delete_does_not_exist(self):
        state = {
            'turn': 2500, 'width': 1000, 'seq': 3,
            'entities': [
                self._species(),
                {'pk': 1, 'type': 'ship', 'x': 480, 'y': 235, 'owner_id': 0},
                self._order(2, 1, 0),
            ]
        }
        updates = {
            0: [
                {'action': 'delete', 'actor_id': 1, 'seq': 1}
            ]
        }
        orders = self._run(state, updates)
        # No order has seq 1: the delete is dropped and nothing changes.
        self.assertEqual(len(orders), 1)
        self.assertEqual(orders[2].serialize(), self._order(2, 1, 0))

    def test_issued_by_non_existent_species(self):
        state = {
            'turn': 2500, 'width': 1000, 'seq': 3,
            'entities': [
                self._species(),
                {'pk': 1, 'type': 'ship', 'x': 480, 'y': 235, 'owner_id': 0},
                self._order(2, 1, 0),
            ]
        }
        updates = {
            1: [
                {'action': 'delete', 'actor_id': 1, 'seq': 0}
            ]
        }
        S = engine.GameState(state, updates)
        # Updates keyed by an unknown species id are dropped at construction.
        self.assertEqual(len(S.manager._updates), 0)
        systems.UpdateSystem().process(S.manager)
        orders = S.manager.get_entities('orders')
        self.assertEqual(len(orders), 1)
        self.assertEqual(orders[2].serialize(), self._order(2, 1, 0))

    def test_issued_for_non_existent_entity(self):
        state = {
            'turn': 2500, 'width': 1000, 'seq': 3,
            'entities': [
                self._species(),
                {'pk': 1, 'type': 'ship', 'x': 480, 'y': 235, 'owner_id': 0},
                self._order(2, 1, 0),
            ]
        }
        updates = {
            0: [
                {'action': 'delete', 'actor_id': 3, 'seq': 0}
            ]
        }
        S = engine.GameState(state, updates)
        # Updates aimed at a non-existent actor are dropped at construction.
        self.assertEqual(len(S.manager._updates), 0)
        systems.UpdateSystem().process(S.manager)
        orders = S.manager.get_entities('orders')
        self.assertEqual(len(orders), 1)
        self.assertEqual(orders[2].serialize(), self._order(2, 1, 0))

    def test_issued_by_wrong_species(self):
        state = {
            'turn': 2500, 'width': 1000, 'seq': 4,
            'entities': [
                self._species(),
                self._species(pk=1, name='Romulan', plural_name='Romulans'),
                {'pk': 2, 'type': 'ship', 'x': 480, 'y': 235, 'owner_id': 0},
                self._order(3, 2, 0),
            ]
        }
        updates = {
            1: [
                {'action': 'delete', 'actor_id': 2, 'seq': 0}
            ]
        }
        S = engine.GameState(state, updates)
        # The Romulans (pk 1) don't own ship 2, so their update is dropped.
        self.assertEqual(len(S.manager._updates), 0)
        systems.UpdateSystem().process(S.manager)
        orders = S.manager.get_entities('orders')
        self.assertEqual(len(orders), 1)
        self.assertEqual(orders[3].serialize(), self._order(3, 2, 0))
class MiningTestCase(unittest.TestCase):
    """Tests for systems.MiningSystem mineral extraction on planets.

    The duplicated species/planet entity dicts are factored into helpers
    returning fresh dicts; `_mine` runs the system over a one-planet state
    and `_assert_minerals` checks all three mineral amounts at once.
    """

    @staticmethod
    def _species():
        """Return a fresh species entity with the standard test attributes."""
        return {
            'pk': 0,
            'type': 'species',
            'name': 'Human',
            'plural_name': 'Humans',
            'growth_rate': 15,
            'gravity_immune': True,
            'temperature_immune': True,
            'radiation_immune': True,
            'population_per_r': 1000,
            'factories_produce_r': 10,
            'factories_cost_r': 10,
            'factories_per_pop': 10,
            'factories_cost_less': False,
            'minerals_per_m': 10,
            'mines_cost_r': 5,
            'mines_per_pop': 10,
        }

    @staticmethod
    def _planet(**overrides):
        """Return a fresh planet entity; keyword args override/extend defaults.

        Defaults are an unowned, mine-less planet at 100% concentration.
        """
        planet = {
            'pk': 1,
            'type': 'planet',
            'population': 1000,
            'x': 480,
            'y': 235,
            'gravity': 50,
            'temperature': 50,
            'radiation': 50,
            'ironium_conc': 100,
            'boranium_conc': 100,
            'germanium_conc': 100,
        }
        planet.update(overrides)
        return planet

    def _mine(self, planet):
        """Run the mining system over a state with one species and *planet*."""
        state = {
            'turn': 2500,
            'width': 1000,
            'entities': [self._species(), planet],
        }
        S = engine.GameState(state, {})
        systems.MiningSystem().process(S.manager)
        return S.manager.get_entities('mineral_concentrations')

    def _assert_minerals(self, planets, amount):
        """Assert exactly one planet exists with *amount* of each mineral."""
        self.assertEqual(len(planets), 1)
        self.assertEqual(planets[1].ironium or 0, amount)
        self.assertEqual(planets[1].germanium or 0, amount)
        self.assertEqual(planets[1].boranium or 0, amount)

    def test_uninhabited(self):
        # No owner: nothing is mined.
        planets = self._mine(self._planet())
        self._assert_minerals(planets, 0)

    def test_no_mines(self):
        # Owned but without mines: still nothing is mined.
        planets = self._mine(self._planet(owner_id=0))
        self._assert_minerals(planets, 0)

    def test_full_concentration(self):
        # 1000 mines at 100% concentration yield 1000 of each mineral.
        planets = self._mine(self._planet(
            population=1_000_000, mines=1000, owner_id=0))
        self._assert_minerals(planets, 1000)

    def test_partial_concentration(self):
        # Halving the concentration halves the yield.
        planets = self._mine(self._planet(
            population=1_000_000, mines=1000, owner_id=0,
            ironium_conc=50, boranium_conc=50, germanium_conc=50))
        self._assert_minerals(planets, 500)

    def test_depleted(self):
        # Near-depleted (1%) concentration yields only a trickle.
        planets = self._mine(self._planet(
            population=1_000_000, mines=1000, owner_id=0,
            ironium_conc=1, boranium_conc=1, germanium_conc=1))
        self._assert_minerals(planets, 10)
class PopulationGrowthTestCase(unittest.TestCase):
def test_habitability_growth(self):
state = {
'turn': 2500,
'width': 1000,
'entities': [
{
'pk': 0,
'type': 'species',
'name': 'Human',
'plural_name': 'Humans',
'growth_rate': 15,
'gravity_immune': True,
'temperature_immune': True,
'radiation_immune': True,
'population_per_r': 1000,
'factories_produce_r': 10,
'factories_cost_r': 10,
'factories_per_pop': 10,
'factories_cost_less': False,
'minerals_per_m': 10,
'mines_cost_r': 5,
'mines_per_pop': 10,
},
{
'pk': 1,
'type': 'planet',
'population': 1000,
'x': 480,
'y': 235,
'gravity': 50,
'temperature': 50,
'radiation': 50,
'ironium_conc': 50,
'boranium_conc': 50,
'germanium_conc': 50,
'owner_id': 0,
},
]
}
S = engine.GameState(state, {})
results = S.generate()
self.assertEqual(len(results['entities']), 2)
self.assertEqual(results['entities'][1]['owner_id'], 0)
self.assertEqual(results['entities'][1]['population'], 1150)
def test_crowding_growth(self):
state = {
'turn': 2500,
'width': 1000,
'entities': [
{
'pk': 0,
'type': 'species',
'name': 'Human',
'plural_name': 'Humans',
'growth_rate': 15,
'gravity_immune': True,
'temperature_immune': True,
'radiation_immune': True,
'population_per_r': 1000,
'factories_produce_r': 10,
'factories_cost_r': 10,
'factories_per_pop': 10,
'factories_cost_less': False,
'minerals_per_m': 10,
'mines_cost_r': 5,
'mines_per_pop': 10,
},
{
'pk': 1,
'type': 'planet',
'population': 500_000,
'x': 480,
'y': 235,
'gravity': 50,
'temperature': 50,
'radiation': 50,
'ironium_conc': 50,
'boranium_conc': 50,
'germanium_conc': 50,
'owner_id': 0,
},
]
}
S = engine.GameState(state, {})
results = S.generate()
self.assertEqual(len(results['entities']), 2)
self.assertEqual(results['entities'][1]['owner_id'], 0)
# At a population of 50% of capacity, the crowding factor
# should be 4/9. Effective growth rate should then be 6.67%.
self.assertEqual(results['entities'][1]['population'], 533_333)
def test_capacity_growth(self):
state = {
'turn': 2500,
'width': 1000,
'entities': [
{
'pk': 0,
'type': 'species',
'name': 'Human',
'plural_name': 'Humans',
'growth_rate': 15,
'gravity_immune': False,
'temperature_immune': False,
'radiation_immune': False,
'gravity_min': 32,
'gravity_max': 86,
'temperature_min': 10,
'temperature_max': 64,
'radiation_min': 38,
'radiation_max': 90,
'population_per_r': 1000,
'factories_produce_r': 10,
'factories_cost_r': 10,
'factories_per_pop': 10,
'factories_cost_less': False,
'minerals_per_m': 10,
'mines_cost_r': 5,
'mines_per_pop': 10,
},
{
'pk': 1,
'type': 'planet',
'population': 1_000_000,
'x': 480,
'y': 235,
'gravity': 59,
'temperature': 37,
'radiation': 64,
'ironium_conc': 50,
'boranium_conc': 50,
'germanium_conc': 50,
'owner_id': 0,
},
]
}
S = engine.GameState(state, {})
results = S.generate()
self.assertEqual(len(results['entities']), 2)
self.assertEqual(results['entities'][1]['owner_id'], 0)
self.assertEqual(results['entities'][1]['population'], 1_000_000)
def test_overcrowding_growth(self):
state = {
'turn': 2500,
'width': 1000,
'entities': [
{
'pk': 0,
'type': 'species',
'name': 'Human',
'plural_name': 'Humans',
'growth_rate': 15,
'gravity_immune': False,
'temperature_immune': False,
'radiation_immune': False,
'gravity_min': 32,
'gravity_max': 86,
'temperature_min': 10,
'temperature_max': 64,
'radiation_min': 38,
'radiation_max': 90,
'population_per_r': 1000,
'factories_produce_r': 10,
'factories_cost_r': 10,
'factories_per_pop': 10,
'factories_cost_less': False,
'minerals_per_m': 10,
'mines_cost_r': 5,
'mines_per_pop': 10,
},
{
'pk': 1,
'type': 'planet',
'population': 2_000_000,
'x': 480,
'y': 235,
'gravity': 59,
'temperature': 37,
'radiation': 64,
'ironium_conc': 50,
'boranium_conc': 50,
'germanium_conc': 50,
'owner_id': 0,
},
]
}
S = engine.GameState(state, {})
results = S.generate()
self.assertEqual(len(results['entities']), 2)
self.assertEqual(results['entities'][1]['owner_id'], 0)
# At a population of 200% of capacity, the death rate should be 100 * 0.04% = 4.0%.
self.assertEqual(results['entities'][1]['population'], 1_920_000)
def test_uninhabitable_growth(self):
state = {
'turn': 2500,
'width': 1000,
'entities': [
{
'pk': 0,
'type': 'species',
'name': 'Human',
'plural_name': 'Humans',
'growth_rate': 15,
'gravity_immune': False,
'temperature_immune': False,
'radiation_immune': False,
'gravity_min': 32,
'gravity_max': 86,
'temperature_min': 10,
'temperature_max': 64,
'radiation_min': 38,
'radiation_max': 90,
'population_per_r': 1000,
'factories_produce_r': 10,
'factories_cost_r': 10,
'factories_per_pop': 10,
'factories_cost_less': False,
'minerals_per_m': 10,
'mines_cost_r': 5,
'mines_per_pop': 10,
},
{
'pk': 1,
'type': 'planet',
'population': 100_000,
'x': 480,
'y': 235,
'gravity': 1,
'temperature': 84,
'radiation': 3,
'ironium_conc': 50,
'boranium_conc': 50,
'germanium_conc': 50,
'owner_id': 0,
},
]
}
S = engine.GameState(state, {})
results = S.generate()
self.assertEqual(len(results['entities']), 2)
self.assertEqual(results['entities'][1]['owner_id'], 0)
# This planet should be at -45% habitable, so we should lose 4.5%.
self.assertEqual(results['entities'][1]['population'], 95_500)
def test_lose_ownership(self):
state = {
'turn': 2500,
'width': 1000,
'entities': [
{
'pk': 0,
'type': 'species',
'name': 'Human',
'plural_name': 'Humans',
'growth_rate': 15,
'gravity_immune': True,
'temperature_immune': True,
'radiation_immune': True,
'population_per_r': 1000,
'factories_produce_r': 10,
'factories_cost_r': 10,
'factories_per_pop': 10,
'factories_cost_less': False,
'minerals_per_m': 10,
'mines_cost_r': 5,
'mines_per_pop': 10,
},
{
'pk': 1,
'type': 'planet',
'population': 0,
'x': 480,
'y': 235,
'gravity': 50,
'temperature': 50,
'radiation': 50,
'ironium_conc': 50,
'boranium_conc': 50,
'germanium_conc': 50,
'owner_id': 0,
},
]
}
S = engine.GameState(state, {})
results = S.generate()
self.assertEqual(len(results['entities']), 2)
self.assertNotIn('population', results['entities'][1])
self.assertNotIn('owner_id', results['entities'][1])
| 36.222323
| 112
| 0.40003
| 3,509
| 39,917
| 4.318894
| 0.048447
| 0.05226
| 0.038007
| 0.023227
| 0.946156
| 0.94609
| 0.939492
| 0.922732
| 0.918905
| 0.918905
| 0
| 0.073678
| 0.465491
| 39,917
| 1,101
| 113
| 36.255223
| 0.636624
| 0.006564
| 0
| 0.783546
| 0
| 0
| 0.234401
| 0.002774
| 0
| 0
| 0
| 0
| 0.064643
| 1
| 0.022527
| false
| 0
| 0.001959
| 0
| 0.027424
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
01326a75e13cb645516eee9df670c6535d0e309d
| 178
|
py
|
Python
|
src/bee_psd/exector.py
|
awenhaowenchao/bee-psd
|
945875b47beb877daf2383979deb388f5d686c9f
|
[
"MIT"
] | 4
|
2019-11-12T05:01:42.000Z
|
2022-02-23T01:52:11.000Z
|
src/bee_psd/exector.py
|
awenhaowenchao/bee-psd
|
945875b47beb877daf2383979deb388f5d686c9f
|
[
"MIT"
] | 6
|
2021-03-19T08:13:39.000Z
|
2022-03-02T15:00:19.000Z
|
src/bee_psd/exector.py
|
awenhaowenchao/bee-psd
|
945875b47beb877daf2383979deb388f5d686c9f
|
[
"MIT"
] | null | null | null |
class IExecutor():
def exec(self, query, args=[]):
pass
def find_one(self, query, args=[]):
pass
def find_list(self, query, args=[]):
pass
| 16.181818
| 40
| 0.533708
| 22
| 178
| 4.227273
| 0.5
| 0.290323
| 0.419355
| 0.548387
| 0.516129
| 0.516129
| 0
| 0
| 0
| 0
| 0
| 0
| 0.314607
| 178
| 10
| 41
| 17.8
| 0.762295
| 0
| 0
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0.428571
| 0
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
018e039ae96410a014b4e4ac5ea6b38b97fdfb32
| 255
|
py
|
Python
|
energyuse/apps/eusers/templatetags/decarbonet_user_extras.py
|
evhart/energyuse
|
be76bac535bfea33d30867e232c2dcb35e1c7740
|
[
"MIT"
] | null | null | null |
energyuse/apps/eusers/templatetags/decarbonet_user_extras.py
|
evhart/energyuse
|
be76bac535bfea33d30867e232c2dcb35e1c7740
|
[
"MIT"
] | 14
|
2019-12-26T17:01:14.000Z
|
2022-03-21T22:16:52.000Z
|
energyuse/apps/eusers/templatetags/decarbonet_user_extras.py
|
evhart/energyuse
|
be76bac535bfea33d30867e232c2dcb35e1c7740
|
[
"MIT"
] | null | null | null |
from django import template
from energyuse.apps.eusers.models import EnergyConsumption
register = template.Library()
@register.assignment_tag
def average_consumption(email,concept='all'):
return EnergyConsumption.average_consumption(email, concept)
| 28.333333
| 64
| 0.831373
| 29
| 255
| 7.206897
| 0.689655
| 0.172249
| 0.220096
| 0.287081
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090196
| 255
| 8
| 65
| 31.875
| 0.900862
| 0
| 0
| 0
| 0
| 0
| 0.011765
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0.166667
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
6d7475c34df128adc75dfc9b4a0dd800eb21cf77
| 120
|
py
|
Python
|
DSToolkit/searching/__init__.py
|
AndreaFerrante/DSToolkit
|
6f527cb4c19127cecd74bb682330236aa4e41839
|
[
"MIT"
] | null | null | null |
DSToolkit/searching/__init__.py
|
AndreaFerrante/DSToolkit
|
6f527cb4c19127cecd74bb682330236aa4e41839
|
[
"MIT"
] | null | null | null |
DSToolkit/searching/__init__.py
|
AndreaFerrante/DSToolkit
|
6f527cb4c19127cecd74bb682330236aa4e41839
|
[
"MIT"
] | null | null | null |
from .binary_search import *
from .linear_search import *
from .jump_search import *
from .interpolation_search import *
| 30
| 35
| 0.808333
| 16
| 120
| 5.8125
| 0.4375
| 0.516129
| 0.516129
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 120
| 4
| 35
| 30
| 0.885714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6d9bba99d7fc42db759459e5882a59894332b98e
| 45
|
py
|
Python
|
tests/test_files/tuple.py
|
Procrat/typy
|
668cedb7f929256a09f565af9ee43c02889bec3f
|
[
"MIT"
] | 3
|
2016-03-08T09:55:20.000Z
|
2016-09-09T12:54:12.000Z
|
tests/test_files/tuple.py
|
Procrat/typy
|
668cedb7f929256a09f565af9ee43c02889bec3f
|
[
"MIT"
] | null | null | null |
tests/test_files/tuple.py
|
Procrat/typy
|
668cedb7f929256a09f565af9ee43c02889bec3f
|
[
"MIT"
] | null | null | null |
x = (5, 6)
def f():
return (7, 8)
f()
| 5.625
| 17
| 0.355556
| 9
| 45
| 1.777778
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 0.377778
| 45
| 7
| 18
| 6.428571
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0.25
| 0.5
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 7
|
6db505101c11af2b04fb26ef5b5a53648a221fe0
| 34
|
py
|
Python
|
__init__.py
|
kalliope-project/kalliope_neuron_rss_reader
|
1437e61ebd6f4d4d6926f15daef5fe036e4a5fdf
|
[
"MIT"
] | null | null | null |
__init__.py
|
kalliope-project/kalliope_neuron_rss_reader
|
1437e61ebd6f4d4d6926f15daef5fe036e4a5fdf
|
[
"MIT"
] | 6
|
2017-07-25T21:12:25.000Z
|
2020-05-06T12:01:59.000Z
|
__init__.py
|
kalliope-project/kalliope_neuron_rss_reader
|
1437e61ebd6f4d4d6926f15daef5fe036e4a5fdf
|
[
"MIT"
] | 5
|
2017-08-10T00:22:22.000Z
|
2021-09-05T16:08:16.000Z
|
from rss_reader import Rss_reader
| 17
| 33
| 0.882353
| 6
| 34
| 4.666667
| 0.666667
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 34
| 1
| 34
| 34
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
09c34dd1625f3d016f8a41a7ed770933513ee1b6
| 550
|
py
|
Python
|
eval_covid19china_timm-regnetx_002_RandomSnow.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_covid19china_timm-regnetx_002_RandomSnow.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_covid19china_timm-regnetx_002_RandomSnow.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
import os
ls=["python main.py --configs configs/eval_covid19china_unetplusplus_timm-regnetx_002_0_RandomSnow.yml",
"python main.py --configs configs/eval_covid19china_unetplusplus_timm-regnetx_002_1_RandomSnow.yml",
"python main.py --configs configs/eval_covid19china_unetplusplus_timm-regnetx_002_2_RandomSnow.yml",
"python main.py --configs configs/eval_covid19china_unetplusplus_timm-regnetx_002_3_RandomSnow.yml",
"python main.py --configs configs/eval_covid19china_unetplusplus_timm-regnetx_002_4_RandomSnow.yml",
]
for l in ls:
os.system(l)
| 50
| 104
| 0.849091
| 80
| 550
| 5.4625
| 0.3
| 0.114416
| 0.1373
| 0.217391
| 0.897025
| 0.897025
| 0.897025
| 0.897025
| 0.897025
| 0.897025
| 0
| 0.057803
| 0.056364
| 550
| 11
| 105
| 50
| 0.7842
| 0
| 0
| 0
| 0
| 0
| 0.880218
| 0.653358
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
113c559e22b764a25326e63dc53542fd0b3bfa21
| 144,154
|
py
|
Python
|
GM2AUTOSAR_MM/GM2AUTOSAR_MM_MM.py
|
levilucio/SyVOLT
|
7526ec794d21565e3efcc925a7b08ae8db27d46a
|
[
"MIT"
] | 3
|
2017-06-02T19:26:27.000Z
|
2021-06-14T04:25:45.000Z
|
GM2AUTOSAR_MM/GM2AUTOSAR_MM_MM.py
|
levilucio/SyVOLT
|
7526ec794d21565e3efcc925a7b08ae8db27d46a
|
[
"MIT"
] | 8
|
2016-08-24T07:04:07.000Z
|
2017-05-26T16:22:47.000Z
|
GM2AUTOSAR_MM/GM2AUTOSAR_MM_MM.py
|
levilucio/SyVOLT
|
7526ec794d21565e3efcc925a7b08ae8db27d46a
|
[
"MIT"
] | 1
|
2019-10-31T06:00:23.000Z
|
2019-10-31T06:00:23.000Z
|
"""
__GM2AUTOSAR_MM_MM.py______________________________________________________
Automatically generated AToM3 MetaModel (DO NOT MODIFY DIRECTLY)
Author: levi
Modified: Sun Aug 9 23:43:00 2015
___________________________________________________________________________
"""
from ASG_GM2AUTOSAR_MM import *
from graph_ASG_ERmetaMetaModel import *
from Tkinter import *
from ATOM3TypeInfo import *
from ATOM3String import *
from StatusBar import *
from ATOM3TypeDialog import *
from MatchModel import *
from ApplyModel import *
from MetaModelElement_S import *
from MetaModelElement_T import *
from ECU import *
from VirtualDevice import *
from Distributable import *
from ExecFrame import *
from Signal import *
from System import *
from SystemMapping import *
from SoftwareComposition import *
from CompositionType import *
from ComponentPrototype import *
from PPortPrototype import *
from RPortPrototype import *
from EcuInstance import *
from SwcToEcuMapping import *
from SwCompToEcuMapping_component import *
from PortPrototype import *
from ComponentType import *
from paired_with import *
from match_contains import *
from directLink_S import *
from directLink_T import *
from apply_contains import *
from indirectLink_S import *
from backward_link import *
from trace_link import *
def createNewASGroot(self):
return ASG_GM2AUTOSAR_MM(self, None)
def createModelMenu(self, modelMenu):
"Creates a customized Model Menu for the actual formalism"
modelMenu.add_command(label="New MatchModel", command=lambda x=self: x.createNewMatchModel(x, 100, 100) )
modelMenu.add_command(label="New ApplyModel", command=lambda x=self: x.createNewApplyModel(x, 100, 100) )
modelMenu.add_command(label="New MetaModelElement_S", command=lambda x=self: x.createNewMetaModelElement_S(x, 100, 100) )
modelMenu.add_command(label="New MetaModelElement_T", command=lambda x=self: x.createNewMetaModelElement_T(x, 100, 100) )
modelMenu.add_command(label="New ECU", command=lambda x=self: x.createNewECU(x, 100, 100) )
modelMenu.add_command(label="New VirtualDevice", command=lambda x=self: x.createNewVirtualDevice(x, 100, 100) )
modelMenu.add_command(label="New Distributable", command=lambda x=self: x.createNewDistributable(x, 100, 100) )
modelMenu.add_command(label="New ExecFrame", command=lambda x=self: x.createNewExecFrame(x, 100, 100) )
modelMenu.add_command(label="New Signal", command=lambda x=self: x.createNewSignal(x, 100, 100) )
modelMenu.add_command(label="New System", command=lambda x=self: x.createNewSystem(x, 100, 100) )
modelMenu.add_command(label="New SystemMapping", command=lambda x=self: x.createNewSystemMapping(x, 100, 100) )
modelMenu.add_command(label="New SoftwareComposition", command=lambda x=self: x.createNewSoftwareComposition(x, 100, 100) )
modelMenu.add_command(label="New CompositionType", command=lambda x=self: x.createNewCompositionType(x, 100, 100) )
modelMenu.add_command(label="New ComponentPrototype", command=lambda x=self: x.createNewComponentPrototype(x, 100, 100) )
modelMenu.add_command(label="New PPortPrototype", command=lambda x=self: x.createNewPPortPrototype(x, 100, 100) )
modelMenu.add_command(label="New RPortPrototype", command=lambda x=self: x.createNewRPortPrototype(x, 100, 100) )
modelMenu.add_command(label="New EcuInstance", command=lambda x=self: x.createNewEcuInstance(x, 100, 100) )
modelMenu.add_command(label="New SwcToEcuMapping", command=lambda x=self: x.createNewSwcToEcuMapping(x, 100, 100) )
modelMenu.add_command(label="New SwCompToEcuMapping_component", command=lambda x=self: x.createNewSwCompToEcuMapping_component(x, 100, 100) )
modelMenu.add_command(label="New PortPrototype", command=lambda x=self: x.createNewPortPrototype(x, 100, 100) )
modelMenu.add_command(label="New ComponentType", command=lambda x=self: x.createNewComponentType(x, 100, 100) )
modelMenu.add_command(label="New paired_with", command=lambda x=self: x.createNewpaired_with(x, 100, 100) )
modelMenu.add_command(label="New match_contains", command=lambda x=self: x.createNewmatch_contains(x, 100, 100) )
modelMenu.add_command(label="New directLink_S", command=lambda x=self: x.createNewdirectLink_S(x, 100, 100) )
modelMenu.add_command(label="New directLink_T", command=lambda x=self: x.createNewdirectLink_T(x, 100, 100) )
modelMenu.add_command(label="New apply_contains", command=lambda x=self: x.createNewapply_contains(x, 100, 100) )
modelMenu.add_command(label="New indirectLink_S", command=lambda x=self: x.createNewindirectLink_S(x, 100, 100) )
modelMenu.add_command(label="New backward_link", command=lambda x=self: x.createNewbackward_link(x, 100, 100) )
modelMenu.add_command(label="New trace_link", command=lambda x=self: x.createNewtrace_link(x, 100, 100) )
def setConnectivity(self):
self.ConnectivityMap['directLink_S']={
'directLink_S': [( 'MetaModelElement_S', self.createNewMetaModelElement_S), ( 'ECU', self.createNewECU), ( 'VirtualDevice', self.createNewVirtualDevice), ( 'Distributable', self.createNewDistributable), ( 'ExecFrame', self.createNewExecFrame), ( 'Signal', self.createNewSignal)]
,'apply_contains': []
,'directLink_T': []
,'Distributable': []
,'System': []
,'ComponentType': []
,'paired_with': []
,'PPortPrototype': []
,'backward_link': []
,'SystemMapping': []
,'ApplyModel': []
,'CompositionType': []
,'indirectLink_S': [( 'MetaModelElement_S', self.createNewMetaModelElement_S), ( 'ECU', self.createNewECU), ( 'VirtualDevice', self.createNewVirtualDevice), ( 'Distributable', self.createNewDistributable), ( 'ExecFrame', self.createNewExecFrame), ( 'Signal', self.createNewSignal)]
,'MetaModelElement_T': []
,'ECU': []
,'MetaModelElement_S': []
,'SwcToEcuMapping': []
,'VirtualDevice': []
,'ExecFrame': []
,'PortPrototype': []
,'Signal': []
,'ComponentPrototype': []
,'SoftwareComposition': []
,'MatchModel': []
,'SwCompToEcuMapping_component': []
,'EcuInstance': []
,'match_contains': []
,'trace_link': []
,'RPortPrototype': [] }
self.ConnectivityMap['apply_contains']={
'directLink_S': []
,'apply_contains': []
,'directLink_T': [( 'MetaModelElement_T', self.createNewMetaModelElement_T), ( 'System', self.createNewSystem), ( 'SystemMapping', self.createNewSystemMapping), ( 'SoftwareComposition', self.createNewSoftwareComposition), ( 'CompositionType', self.createNewCompositionType), ( 'ComponentPrototype', self.createNewComponentPrototype), ( 'PPortPrototype', self.createNewPPortPrototype), ( 'RPortPrototype', self.createNewRPortPrototype), ( 'EcuInstance', self.createNewEcuInstance), ( 'SwcToEcuMapping', self.createNewSwcToEcuMapping), ( 'SwCompToEcuMapping_component', self.createNewSwCompToEcuMapping_component), ( 'PortPrototype', self.createNewPortPrototype), ( 'ComponentType', self.createNewComponentType)]
,'Distributable': []
,'System': []
,'ComponentType': []
,'paired_with': []
,'PPortPrototype': []
,'backward_link': [( 'MetaModelElement_T', self.createNewMetaModelElement_T), ( 'System', self.createNewSystem), ( 'SystemMapping', self.createNewSystemMapping), ( 'SoftwareComposition', self.createNewSoftwareComposition), ( 'CompositionType', self.createNewCompositionType), ( 'ComponentPrototype', self.createNewComponentPrototype), ( 'PPortPrototype', self.createNewPPortPrototype), ( 'RPortPrototype', self.createNewRPortPrototype), ( 'EcuInstance', self.createNewEcuInstance), ( 'SwcToEcuMapping', self.createNewSwcToEcuMapping), ( 'SwCompToEcuMapping_component', self.createNewSwCompToEcuMapping_component), ( 'PortPrototype', self.createNewPortPrototype), ( 'ComponentType', self.createNewComponentType)]
,'SystemMapping': []
,'ApplyModel': []
,'CompositionType': []
,'indirectLink_S': []
,'MetaModelElement_T': []
,'ECU': []
,'MetaModelElement_S': []
,'SwcToEcuMapping': []
,'VirtualDevice': []
,'ExecFrame': []
,'PortPrototype': []
,'Signal': []
,'ComponentPrototype': []
,'SoftwareComposition': []
,'MatchModel': []
,'SwCompToEcuMapping_component': []
,'EcuInstance': []
,'match_contains': []
,'trace_link': [( 'MetaModelElement_T', self.createNewMetaModelElement_T), ( 'System', self.createNewSystem), ( 'SystemMapping', self.createNewSystemMapping), ( 'SoftwareComposition', self.createNewSoftwareComposition), ( 'CompositionType', self.createNewCompositionType), ( 'ComponentPrototype', self.createNewComponentPrototype), ( 'PPortPrototype', self.createNewPPortPrototype), ( 'RPortPrototype', self.createNewRPortPrototype), ( 'EcuInstance', self.createNewEcuInstance), ( 'SwcToEcuMapping', self.createNewSwcToEcuMapping), ( 'SwCompToEcuMapping_component', self.createNewSwCompToEcuMapping_component), ( 'PortPrototype', self.createNewPortPrototype), ( 'ComponentType', self.createNewComponentType)]
,'RPortPrototype': [] }
self.ConnectivityMap['SystemMapping']={
'directLink_S': []
,'apply_contains': []
,'directLink_T': []
,'Distributable': [( 'backward_link', self.createNewbackward_link), ( 'trace_link', self.createNewtrace_link)]
,'System': [( 'directLink_T', self.createNewdirectLink_T)]
,'ComponentType': [( 'directLink_T', self.createNewdirectLink_T)]
,'paired_with': []
,'PPortPrototype': [( 'directLink_T', self.createNewdirectLink_T)]
,'backward_link': []
,'SystemMapping': [( 'directLink_T', self.createNewdirectLink_T)]
,'ApplyModel': []
,'CompositionType': [( 'directLink_T', self.createNewdirectLink_T)]
,'indirectLink_S': []
,'MetaModelElement_T': [( 'directLink_T', self.createNewdirectLink_T)]
,'ECU': [( 'backward_link', self.createNewbackward_link), ( 'trace_link', self.createNewtrace_link)]
,'MetaModelElement_S': [( 'backward_link', self.createNewbackward_link), ( 'trace_link', self.createNewtrace_link)]
,'SwcToEcuMapping': [( 'directLink_T', self.createNewdirectLink_T)]
,'VirtualDevice': [( 'backward_link', self.createNewbackward_link), ( 'trace_link', self.createNewtrace_link)]
,'ExecFrame': [( 'backward_link', self.createNewbackward_link), ( 'trace_link', self.createNewtrace_link)]
,'PortPrototype': [( 'directLink_T', self.createNewdirectLink_T)]
,'Signal': [( 'backward_link', self.createNewbackward_link), ( 'trace_link', self.createNewtrace_link)]
,'ComponentPrototype': [( 'directLink_T', self.createNewdirectLink_T)]
,'SoftwareComposition': [( 'directLink_T', self.createNewdirectLink_T)]
,'MatchModel': []
,'SwCompToEcuMapping_component': [( 'directLink_T', self.createNewdirectLink_T)]
,'EcuInstance': [( 'directLink_T', self.createNewdirectLink_T)]
,'match_contains': []
,'trace_link': []
,'RPortPrototype': [( 'directLink_T', self.createNewdirectLink_T)] }
self.ConnectivityMap['Distributable']={
'directLink_S': []
,'apply_contains': []
,'directLink_T': []
,'Distributable': [( 'directLink_S', self.createNewdirectLink_S), ( 'indirectLink_S', self.createNewindirectLink_S)]
,'System': []
,'ComponentType': []
,'paired_with': []
,'PPortPrototype': []
,'backward_link': []
,'SystemMapping': []
,'ApplyModel': []
,'CompositionType': []
,'indirectLink_S': []
,'MetaModelElement_T': []
,'ECU': [( 'directLink_S', self.createNewdirectLink_S), ( 'indirectLink_S', self.createNewindirectLink_S)]
,'MetaModelElement_S': [( 'directLink_S', self.createNewdirectLink_S), ( 'indirectLink_S', self.createNewindirectLink_S)]
,'SwcToEcuMapping': []
,'VirtualDevice': [( 'directLink_S', self.createNewdirectLink_S), ( 'indirectLink_S', self.createNewindirectLink_S)]
,'ExecFrame': [( 'directLink_S', self.createNewdirectLink_S), ( 'indirectLink_S', self.createNewindirectLink_S)]
,'PortPrototype': []
,'Signal': [( 'directLink_S', self.createNewdirectLink_S), ( 'indirectLink_S', self.createNewindirectLink_S)]
,'ComponentPrototype': []
,'SoftwareComposition': []
,'MatchModel': []
,'SwCompToEcuMapping_component': []
,'EcuInstance': []
,'match_contains': []
,'trace_link': []
,'RPortPrototype': [] }
self.ConnectivityMap['System']={
'directLink_S': []
,'apply_contains': []
,'directLink_T': []
,'Distributable': [( 'backward_link', self.createNewbackward_link), ( 'trace_link', self.createNewtrace_link)]
,'System': [( 'directLink_T', self.createNewdirectLink_T)]
,'ComponentType': [( 'directLink_T', self.createNewdirectLink_T)]
,'paired_with': []
,'PPortPrototype': [( 'directLink_T', self.createNewdirectLink_T)]
,'backward_link': []
,'SystemMapping': [( 'directLink_T', self.createNewdirectLink_T)]
,'ApplyModel': []
,'CompositionType': [( 'directLink_T', self.createNewdirectLink_T)]
,'indirectLink_S': []
,'MetaModelElement_T': [( 'directLink_T', self.createNewdirectLink_T)]
,'ECU': [( 'backward_link', self.createNewbackward_link), ( 'trace_link', self.createNewtrace_link)]
,'MetaModelElement_S': [( 'backward_link', self.createNewbackward_link), ( 'trace_link', self.createNewtrace_link)]
,'SwcToEcuMapping': [( 'directLink_T', self.createNewdirectLink_T)]
,'VirtualDevice': [( 'backward_link', self.createNewbackward_link), ( 'trace_link', self.createNewtrace_link)]
,'ExecFrame': [( 'backward_link', self.createNewbackward_link), ( 'trace_link', self.createNewtrace_link)]
,'PortPrototype': [( 'directLink_T', self.createNewdirectLink_T)]
,'Signal': [( 'backward_link', self.createNewbackward_link), ( 'trace_link', self.createNewtrace_link)]
,'ComponentPrototype': [( 'directLink_T', self.createNewdirectLink_T)]
,'SoftwareComposition': [( 'directLink_T', self.createNewdirectLink_T)]
,'MatchModel': []
,'SwCompToEcuMapping_component': [( 'directLink_T', self.createNewdirectLink_T)]
,'EcuInstance': [( 'directLink_T', self.createNewdirectLink_T)]
,'match_contains': []
,'trace_link': []
,'RPortPrototype': [( 'directLink_T', self.createNewdirectLink_T)] }
self.ConnectivityMap['ComponentType']={
'directLink_S': []
,'apply_contains': []
,'directLink_T': []
,'Distributable': [( 'backward_link', self.createNewbackward_link), ( 'trace_link', self.createNewtrace_link)]
,'System': [( 'directLink_T', self.createNewdirectLink_T)]
,'ComponentType': [( 'directLink_T', self.createNewdirectLink_T)]
,'paired_with': []
,'PPortPrototype': [( 'directLink_T', self.createNewdirectLink_T)]
,'backward_link': []
,'SystemMapping': [( 'directLink_T', self.createNewdirectLink_T)]
,'ApplyModel': []
,'CompositionType': [( 'directLink_T', self.createNewdirectLink_T)]
,'indirectLink_S': []
,'MetaModelElement_T': [( 'directLink_T', self.createNewdirectLink_T)]
,'ECU': [( 'backward_link', self.createNewbackward_link), ( 'trace_link', self.createNewtrace_link)]
,'MetaModelElement_S': [( 'backward_link', self.createNewbackward_link), ( 'trace_link', self.createNewtrace_link)]
,'SwcToEcuMapping': [( 'directLink_T', self.createNewdirectLink_T)]
,'VirtualDevice': [( 'backward_link', self.createNewbackward_link), ( 'trace_link', self.createNewtrace_link)]
,'ExecFrame': [( 'backward_link', self.createNewbackward_link), ( 'trace_link', self.createNewtrace_link)]
,'PortPrototype': [( 'directLink_T', self.createNewdirectLink_T)]
,'Signal': [( 'backward_link', self.createNewbackward_link), ( 'trace_link', self.createNewtrace_link)]
,'ComponentPrototype': [( 'directLink_T', self.createNewdirectLink_T)]
,'SoftwareComposition': [( 'directLink_T', self.createNewdirectLink_T)]
,'MatchModel': []
,'SwCompToEcuMapping_component': [( 'directLink_T', self.createNewdirectLink_T)]
,'EcuInstance': [( 'directLink_T', self.createNewdirectLink_T)]
,'match_contains': []
,'trace_link': []
,'RPortPrototype': [( 'directLink_T', self.createNewdirectLink_T)] }
self.ConnectivityMap['paired_with']={
'directLink_S': []
,'apply_contains': [( 'ApplyModel', self.createNewApplyModel)]
,'directLink_T': []
,'Distributable': []
,'System': []
,'ComponentType': []
,'paired_with': []
,'PPortPrototype': []
,'backward_link': []
,'SystemMapping': []
,'ApplyModel': []
,'CompositionType': []
,'indirectLink_S': []
,'MetaModelElement_T': []
,'ECU': []
,'MetaModelElement_S': []
,'SwcToEcuMapping': []
,'VirtualDevice': []
,'ExecFrame': []
,'PortPrototype': []
,'Signal': []
,'ComponentPrototype': []
,'SoftwareComposition': []
,'MatchModel': []
,'SwCompToEcuMapping_component': []
,'EcuInstance': []
,'match_contains': []
,'trace_link': []
,'RPortPrototype': [] }
self.ConnectivityMap['PPortPrototype']={
'directLink_S': []
,'apply_contains': []
,'directLink_T': []
,'Distributable': [( 'backward_link', self.createNewbackward_link), ( 'trace_link', self.createNewtrace_link)]
,'System': [( 'directLink_T', self.createNewdirectLink_T)]
,'ComponentType': [( 'directLink_T', self.createNewdirectLink_T)]
,'paired_with': []
,'PPortPrototype': [( 'directLink_T', self.createNewdirectLink_T)]
,'backward_link': []
,'SystemMapping': [( 'directLink_T', self.createNewdirectLink_T)]
,'ApplyModel': []
,'CompositionType': [( 'directLink_T', self.createNewdirectLink_T)]
,'indirectLink_S': []
,'MetaModelElement_T': [( 'directLink_T', self.createNewdirectLink_T)]
,'ECU': [( 'backward_link', self.createNewbackward_link), ( 'trace_link', self.createNewtrace_link)]
,'MetaModelElement_S': [( 'backward_link', self.createNewbackward_link), ( 'trace_link', self.createNewtrace_link)]
,'SwcToEcuMapping': [( 'directLink_T', self.createNewdirectLink_T)]
,'VirtualDevice': [( 'backward_link', self.createNewbackward_link), ( 'trace_link', self.createNewtrace_link)]
,'ExecFrame': [( 'backward_link', self.createNewbackward_link), ( 'trace_link', self.createNewtrace_link)]
,'PortPrototype': [( 'directLink_T', self.createNewdirectLink_T)]
,'Signal': [( 'backward_link', self.createNewbackward_link), ( 'trace_link', self.createNewtrace_link)]
,'ComponentPrototype': [( 'directLink_T', self.createNewdirectLink_T)]
,'SoftwareComposition': [( 'directLink_T', self.createNewdirectLink_T)]
,'MatchModel': []
,'SwCompToEcuMapping_component': [( 'directLink_T', self.createNewdirectLink_T)]
,'EcuInstance': [( 'directLink_T', self.createNewdirectLink_T)]
,'match_contains': []
,'trace_link': []
,'RPortPrototype': [( 'directLink_T', self.createNewdirectLink_T)] }
# ---------------------------------------------------------------------------
# Connectivity tables (generated data, restructured for readability).
#
# self.ConnectivityMap[X][Y] lists the (association_name, factory) pairs that
# may connect an element/association of kind X to one of kind Y.  The tables
# fall into five repeated shapes, so each is built from a small local helper
# instead of a literal dict.  Key insertion order, the order of top-level
# assignments, and the per-entry fresh list objects all match the original
# generated literals exactly.
# ---------------------------------------------------------------------------
_all_keys = (
    'directLink_S', 'apply_contains', 'directLink_T', 'Distributable',
    'System', 'ComponentType', 'paired_with', 'PPortPrototype',
    'backward_link', 'SystemMapping', 'ApplyModel', 'CompositionType',
    'indirectLink_S', 'MetaModelElement_T', 'ECU', 'MetaModelElement_S',
    'SwcToEcuMapping', 'VirtualDevice', 'ExecFrame', 'PortPrototype',
    'Signal', 'ComponentPrototype', 'SoftwareComposition', 'MatchModel',
    'SwCompToEcuMapping_component', 'EcuInstance', 'match_contains',
    'trace_link', 'RPortPrototype')
# Source-metamodel element kinds (the S side of the transformation).
_source_classes = ('Distributable', 'ECU', 'MetaModelElement_S',
                   'VirtualDevice', 'ExecFrame', 'Signal')
# Target-metamodel element kinds (the T side of the transformation).
_target_classes = ('System', 'ComponentType', 'PPortPrototype',
                   'SystemMapping', 'CompositionType', 'MetaModelElement_T',
                   'SwcToEcuMapping', 'PortPrototype', 'ComponentPrototype',
                   'SoftwareComposition', 'SwCompToEcuMapping_component',
                   'EcuInstance', 'RPortPrototype')
# Factories for S-side elements, in the order the generator emitted them.
_source_factories = [
    ('MetaModelElement_S', self.createNewMetaModelElement_S),
    ('ECU', self.createNewECU),
    ('VirtualDevice', self.createNewVirtualDevice),
    ('Distributable', self.createNewDistributable),
    ('ExecFrame', self.createNewExecFrame),
    ('Signal', self.createNewSignal)]
# Factories for T-side elements, in the order the generator emitted them.
_target_factories = [
    ('MetaModelElement_T', self.createNewMetaModelElement_T),
    ('System', self.createNewSystem),
    ('SystemMapping', self.createNewSystemMapping),
    ('SoftwareComposition', self.createNewSoftwareComposition),
    ('CompositionType', self.createNewCompositionType),
    ('ComponentPrototype', self.createNewComponentPrototype),
    ('PPortPrototype', self.createNewPPortPrototype),
    ('RPortPrototype', self.createNewRPortPrototype),
    ('EcuInstance', self.createNewEcuInstance),
    ('SwcToEcuMapping', self.createNewSwcToEcuMapping),
    ('SwCompToEcuMapping_component', self.createNewSwCompToEcuMapping_component),
    ('PortPrototype', self.createNewPortPrototype),
    ('ComponentType', self.createNewComponentType)]

def _empty_row():
    # Fresh dict with every key mapped to its OWN empty list (no sharing),
    # in the same key order as the generated literals.
    return dict((k, []) for k in _all_keys)

def _t_elem_map():
    # Row for a T-side element: S-side neighbours reach it via
    # backward_link/trace_link; T-side neighbours via directLink_T.
    m = _empty_row()
    for k in _source_classes:
        m[k] = [('backward_link', self.createNewbackward_link),
                ('trace_link', self.createNewtrace_link)]
    for k in _target_classes:
        m[k] = [('directLink_T', self.createNewdirectLink_T)]
    return m

def _s_elem_map():
    # Row for an S-side element: only S-side neighbours, reachable via
    # directLink_S/indirectLink_S.
    m = _empty_row()
    for k in _source_classes:
        m[k] = [('directLink_S', self.createNewdirectLink_S),
                ('indirectLink_S', self.createNewindirectLink_S)]
    return m

def _s_link_map():
    # Row for an S-side association kind: it can chain into the two S-side
    # link kinds, whose endpoints are any S-side element factory.
    m = _empty_row()
    m['directLink_S'] = list(_source_factories)
    m['indirectLink_S'] = list(_source_factories)
    return m

def _t_link_map():
    # Row for the directLink_T association: it chains into the T-side link
    # kinds, whose endpoints are any T-side element factory.
    m = _empty_row()
    for k in ('directLink_T', 'backward_link', 'trace_link'):
        m[k] = list(_target_factories)
    return m

def _apply_map():
    # Row for ApplyModel: it contains every T-side element via apply_contains.
    m = _empty_row()
    for k in _target_classes:
        m[k] = [('apply_contains', self.createNewapply_contains)]
    return m

def _match_map():
    # Row for MatchModel: paired with ApplyModel, and it contains every
    # S-side element via match_contains.
    m = _empty_row()
    m['ApplyModel'] = [('paired_with', self.createNewpaired_with)]
    for k in _source_classes:
        m[k] = [('match_contains', self.createNewmatch_contains)]
    return m

# Assignments kept in the exact order of the original generated code.
self.ConnectivityMap['backward_link'] = _s_link_map()
self.ConnectivityMap['ComponentPrototype'] = _t_elem_map()
self.ConnectivityMap['directLink_T'] = _t_link_map()
self.ConnectivityMap['ApplyModel'] = _apply_map()
self.ConnectivityMap['CompositionType'] = _t_elem_map()
self.ConnectivityMap['indirectLink_S'] = _s_link_map()
self.ConnectivityMap['MetaModelElement_T'] = _t_elem_map()
self.ConnectivityMap['ECU'] = _s_elem_map()
self.ConnectivityMap['MetaModelElement_S'] = _s_elem_map()
self.ConnectivityMap['SwcToEcuMapping'] = _t_elem_map()
self.ConnectivityMap['VirtualDevice'] = _s_elem_map()
self.ConnectivityMap['ExecFrame'] = _s_elem_map()
self.ConnectivityMap['PortPrototype'] = _t_elem_map()
self.ConnectivityMap['Signal'] = _s_elem_map()
self.ConnectivityMap['match_contains'] = _s_link_map()
self.ConnectivityMap['SoftwareComposition'] = _t_elem_map()
self.ConnectivityMap['MatchModel'] = _match_map()
self.ConnectivityMap['SwCompToEcuMapping_component'] = _t_elem_map()
self.ConnectivityMap['EcuInstance'] = _t_elem_map()
self.ConnectivityMap['trace_link'] = _s_link_map()
self.ConnectivityMap['RPortPrototype'] = _t_elem_map()
# ---------------------------------------------------------------------------
# Cardinality tables (generated data, restructured for readability).
#
# self.CardinalityTable[X][assoc] lists (min, max, role) constraints for the
# participation of kind X in association `assoc`; role is 'Source' or
# 'Destination' and max 'N' means unbounded.  Each row is the all-empty
# template with a handful of per-row overrides; key insertion order matches
# the original generated literals.
# ---------------------------------------------------------------------------
_card_keys = (
    'MatchModel', 'ApplyModel', 'MetaModelElement_S', 'MetaModelElement_T',
    'ECU', 'VirtualDevice', 'Distributable', 'ExecFrame', 'Signal',
    'System', 'SystemMapping', 'SoftwareComposition', 'CompositionType',
    'ComponentPrototype', 'PPortPrototype', 'RPortPrototype', 'EcuInstance',
    'SwcToEcuMapping', 'SwCompToEcuMapping_component', 'PortPrototype',
    'ComponentType', 'paired_with', 'match_contains', 'directLink_S',
    'directLink_T', 'apply_contains', 'indirectLink_S', 'backward_link',
    'trace_link')

def _card_row(**constraints):
    # Fresh row: every key gets its OWN empty list, then the given
    # association constraints overwrite their slots (order is unaffected).
    row = dict((k, []) for k in _card_keys)
    row.update(constraints)
    return row

self.CardinalityTable['MatchModel'] = _card_row(
    paired_with=[('1', '1', 'Source')],
    match_contains=[('0', 'N', 'Source')])
self.CardinalityTable['ApplyModel'] = _card_row(
    paired_with=[('1', '1', 'Destination')],
    apply_contains=[('0', 'N', 'Source')])
self.CardinalityTable['MetaModelElement_S'] = _card_row(
    match_contains=[('0', 'N', 'Destination')],
    directLink_S=[('0', 'N', 'Source'), ('0', 'N', 'Destination')],
    indirectLink_S=[('0', 'N', 'Source'), ('0', 'N', 'Destination')],
    backward_link=[('0', 'N', 'Destination')],
    trace_link=[('0', 'N', 'Destination')])
self.CardinalityTable['MetaModelElement_T'] = _card_row(
    directLink_T=[('0', 'N', 'Source'), ('0', 'N', 'Destination')],
    apply_contains=[('0', 'N', 'Destination')],
    backward_link=[('0', 'N', 'Source')],
    trace_link=[('0', 'N', 'Source')])
self.CardinalityTable['ECU']={
'MatchModel': []
,'ApplyModel': []
,'MetaModelElement_S': []
,'MetaModelElement_T': []
,'ECU': []
,'VirtualDevice': []
,'Distributable': []
,'ExecFrame': []
,'Signal': []
,'System': []
,'SystemMapping': []
,'SoftwareComposition': []
,'CompositionType': []
,'ComponentPrototype': []
,'PPortPrototype': []
,'RPortPrototype': []
,'EcuInstance': []
,'SwcToEcuMapping': []
,'SwCompToEcuMapping_component': []
,'PortPrototype': []
,'ComponentType': []
,'paired_with': []
,'match_contains': [('0', 'N', 'Destination')]
,'directLink_S': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'directLink_T': []
,'apply_contains': []
,'indirectLink_S': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'backward_link': [('0', 'N', 'Destination')]
,'trace_link': [('0', 'N', 'Destination')] }
self.CardinalityTable['VirtualDevice']={
'MatchModel': []
,'ApplyModel': []
,'MetaModelElement_S': []
,'MetaModelElement_T': []
,'ECU': []
,'VirtualDevice': []
,'Distributable': []
,'ExecFrame': []
,'Signal': []
,'System': []
,'SystemMapping': []
,'SoftwareComposition': []
,'CompositionType': []
,'ComponentPrototype': []
,'PPortPrototype': []
,'RPortPrototype': []
,'EcuInstance': []
,'SwcToEcuMapping': []
,'SwCompToEcuMapping_component': []
,'PortPrototype': []
,'ComponentType': []
,'paired_with': []
,'match_contains': [('0', 'N', 'Destination')]
,'directLink_S': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'directLink_T': []
,'apply_contains': []
,'indirectLink_S': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'backward_link': [('0', 'N', 'Destination')]
,'trace_link': [('0', 'N', 'Destination')] }
self.CardinalityTable['Distributable']={
'MatchModel': []
,'ApplyModel': []
,'MetaModelElement_S': []
,'MetaModelElement_T': []
,'ECU': []
,'VirtualDevice': []
,'Distributable': []
,'ExecFrame': []
,'Signal': []
,'System': []
,'SystemMapping': []
,'SoftwareComposition': []
,'CompositionType': []
,'ComponentPrototype': []
,'PPortPrototype': []
,'RPortPrototype': []
,'EcuInstance': []
,'SwcToEcuMapping': []
,'SwCompToEcuMapping_component': []
,'PortPrototype': []
,'ComponentType': []
,'paired_with': []
,'match_contains': [('0', 'N', 'Destination')]
,'directLink_S': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'directLink_T': []
,'apply_contains': []
,'indirectLink_S': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'backward_link': [('0', 'N', 'Destination')]
,'trace_link': [('0', 'N', 'Destination')] }
self.CardinalityTable['ExecFrame']={
'MatchModel': []
,'ApplyModel': []
,'MetaModelElement_S': []
,'MetaModelElement_T': []
,'ECU': []
,'VirtualDevice': []
,'Distributable': []
,'ExecFrame': []
,'Signal': []
,'System': []
,'SystemMapping': []
,'SoftwareComposition': []
,'CompositionType': []
,'ComponentPrototype': []
,'PPortPrototype': []
,'RPortPrototype': []
,'EcuInstance': []
,'SwcToEcuMapping': []
,'SwCompToEcuMapping_component': []
,'PortPrototype': []
,'ComponentType': []
,'paired_with': []
,'match_contains': [('0', 'N', 'Destination')]
,'directLink_S': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'directLink_T': []
,'apply_contains': []
,'indirectLink_S': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'backward_link': [('0', 'N', 'Destination')]
,'trace_link': [('0', 'N', 'Destination')] }
self.CardinalityTable['Signal']={
'MatchModel': []
,'ApplyModel': []
,'MetaModelElement_S': []
,'MetaModelElement_T': []
,'ECU': []
,'VirtualDevice': []
,'Distributable': []
,'ExecFrame': []
,'Signal': []
,'System': []
,'SystemMapping': []
,'SoftwareComposition': []
,'CompositionType': []
,'ComponentPrototype': []
,'PPortPrototype': []
,'RPortPrototype': []
,'EcuInstance': []
,'SwcToEcuMapping': []
,'SwCompToEcuMapping_component': []
,'PortPrototype': []
,'ComponentType': []
,'paired_with': []
,'match_contains': [('0', 'N', 'Destination')]
,'directLink_S': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'directLink_T': []
,'apply_contains': []
,'indirectLink_S': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'backward_link': [('0', 'N', 'Destination')]
,'trace_link': [('0', 'N', 'Destination')] }
self.CardinalityTable['System']={
'MatchModel': []
,'ApplyModel': []
,'MetaModelElement_S': []
,'MetaModelElement_T': []
,'ECU': []
,'VirtualDevice': []
,'Distributable': []
,'ExecFrame': []
,'Signal': []
,'System': []
,'SystemMapping': []
,'SoftwareComposition': []
,'CompositionType': []
,'ComponentPrototype': []
,'PPortPrototype': []
,'RPortPrototype': []
,'EcuInstance': []
,'SwcToEcuMapping': []
,'SwCompToEcuMapping_component': []
,'PortPrototype': []
,'ComponentType': []
,'paired_with': []
,'match_contains': []
,'directLink_S': []
,'directLink_T': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'apply_contains': [('0', 'N', 'Destination')]
,'indirectLink_S': []
,'backward_link': [('0', 'N', 'Source')]
,'trace_link': [('0', 'N', 'Source')] }
self.CardinalityTable['SystemMapping']={
'MatchModel': []
,'ApplyModel': []
,'MetaModelElement_S': []
,'MetaModelElement_T': []
,'ECU': []
,'VirtualDevice': []
,'Distributable': []
,'ExecFrame': []
,'Signal': []
,'System': []
,'SystemMapping': []
,'SoftwareComposition': []
,'CompositionType': []
,'ComponentPrototype': []
,'PPortPrototype': []
,'RPortPrototype': []
,'EcuInstance': []
,'SwcToEcuMapping': []
,'SwCompToEcuMapping_component': []
,'PortPrototype': []
,'ComponentType': []
,'paired_with': []
,'match_contains': []
,'directLink_S': []
,'directLink_T': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'apply_contains': [('0', 'N', 'Destination')]
,'indirectLink_S': []
,'backward_link': [('0', 'N', 'Source')]
,'trace_link': [('0', 'N', 'Source')] }
self.CardinalityTable['SoftwareComposition']={
'MatchModel': []
,'ApplyModel': []
,'MetaModelElement_S': []
,'MetaModelElement_T': []
,'ECU': []
,'VirtualDevice': []
,'Distributable': []
,'ExecFrame': []
,'Signal': []
,'System': []
,'SystemMapping': []
,'SoftwareComposition': []
,'CompositionType': []
,'ComponentPrototype': []
,'PPortPrototype': []
,'RPortPrototype': []
,'EcuInstance': []
,'SwcToEcuMapping': []
,'SwCompToEcuMapping_component': []
,'PortPrototype': []
,'ComponentType': []
,'paired_with': []
,'match_contains': []
,'directLink_S': []
,'directLink_T': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'apply_contains': [('0', 'N', 'Destination')]
,'indirectLink_S': []
,'backward_link': [('0', 'N', 'Source')]
,'trace_link': [('0', 'N', 'Source')] }
self.CardinalityTable['CompositionType']={
'MatchModel': []
,'ApplyModel': []
,'MetaModelElement_S': []
,'MetaModelElement_T': []
,'ECU': []
,'VirtualDevice': []
,'Distributable': []
,'ExecFrame': []
,'Signal': []
,'System': []
,'SystemMapping': []
,'SoftwareComposition': []
,'CompositionType': []
,'ComponentPrototype': []
,'PPortPrototype': []
,'RPortPrototype': []
,'EcuInstance': []
,'SwcToEcuMapping': []
,'SwCompToEcuMapping_component': []
,'PortPrototype': []
,'ComponentType': []
,'paired_with': []
,'match_contains': []
,'directLink_S': []
,'directLink_T': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'apply_contains': [('0', 'N', 'Destination')]
,'indirectLink_S': []
,'backward_link': [('0', 'N', 'Source')]
,'trace_link': [('0', 'N', 'Source')] }
self.CardinalityTable['ComponentPrototype']={
'MatchModel': []
,'ApplyModel': []
,'MetaModelElement_S': []
,'MetaModelElement_T': []
,'ECU': []
,'VirtualDevice': []
,'Distributable': []
,'ExecFrame': []
,'Signal': []
,'System': []
,'SystemMapping': []
,'SoftwareComposition': []
,'CompositionType': []
,'ComponentPrototype': []
,'PPortPrototype': []
,'RPortPrototype': []
,'EcuInstance': []
,'SwcToEcuMapping': []
,'SwCompToEcuMapping_component': []
,'PortPrototype': []
,'ComponentType': []
,'paired_with': []
,'match_contains': []
,'directLink_S': []
,'directLink_T': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'apply_contains': [('0', 'N', 'Destination')]
,'indirectLink_S': []
,'backward_link': [('0', 'N', 'Source')]
,'trace_link': [('0', 'N', 'Source')] }
self.CardinalityTable['PPortPrototype']={
'MatchModel': []
,'ApplyModel': []
,'MetaModelElement_S': []
,'MetaModelElement_T': []
,'ECU': []
,'VirtualDevice': []
,'Distributable': []
,'ExecFrame': []
,'Signal': []
,'System': []
,'SystemMapping': []
,'SoftwareComposition': []
,'CompositionType': []
,'ComponentPrototype': []
,'PPortPrototype': []
,'RPortPrototype': []
,'EcuInstance': []
,'SwcToEcuMapping': []
,'SwCompToEcuMapping_component': []
,'PortPrototype': []
,'ComponentType': []
,'paired_with': []
,'match_contains': []
,'directLink_S': []
,'directLink_T': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'apply_contains': [('0', 'N', 'Destination')]
,'indirectLink_S': []
,'backward_link': [('0', 'N', 'Source')]
,'trace_link': [('0', 'N', 'Source')] }
self.CardinalityTable['RPortPrototype']={
'MatchModel': []
,'ApplyModel': []
,'MetaModelElement_S': []
,'MetaModelElement_T': []
,'ECU': []
,'VirtualDevice': []
,'Distributable': []
,'ExecFrame': []
,'Signal': []
,'System': []
,'SystemMapping': []
,'SoftwareComposition': []
,'CompositionType': []
,'ComponentPrototype': []
,'PPortPrototype': []
,'RPortPrototype': []
,'EcuInstance': []
,'SwcToEcuMapping': []
,'SwCompToEcuMapping_component': []
,'PortPrototype': []
,'ComponentType': []
,'paired_with': []
,'match_contains': []
,'directLink_S': []
,'directLink_T': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'apply_contains': [('0', 'N', 'Destination')]
,'indirectLink_S': []
,'backward_link': [('0', 'N', 'Source')]
,'trace_link': [('0', 'N', 'Source')] }
self.CardinalityTable['EcuInstance']={
'MatchModel': []
,'ApplyModel': []
,'MetaModelElement_S': []
,'MetaModelElement_T': []
,'ECU': []
,'VirtualDevice': []
,'Distributable': []
,'ExecFrame': []
,'Signal': []
,'System': []
,'SystemMapping': []
,'SoftwareComposition': []
,'CompositionType': []
,'ComponentPrototype': []
,'PPortPrototype': []
,'RPortPrototype': []
,'EcuInstance': []
,'SwcToEcuMapping': []
,'SwCompToEcuMapping_component': []
,'PortPrototype': []
,'ComponentType': []
,'paired_with': []
,'match_contains': []
,'directLink_S': []
,'directLink_T': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'apply_contains': [('0', 'N', 'Destination')]
,'indirectLink_S': []
,'backward_link': [('0', 'N', 'Source')]
,'trace_link': [('0', 'N', 'Source')] }
self.CardinalityTable['SwcToEcuMapping']={
'MatchModel': []
,'ApplyModel': []
,'MetaModelElement_S': []
,'MetaModelElement_T': []
,'ECU': []
,'VirtualDevice': []
,'Distributable': []
,'ExecFrame': []
,'Signal': []
,'System': []
,'SystemMapping': []
,'SoftwareComposition': []
,'CompositionType': []
,'ComponentPrototype': []
,'PPortPrototype': []
,'RPortPrototype': []
,'EcuInstance': []
,'SwcToEcuMapping': []
,'SwCompToEcuMapping_component': []
,'PortPrototype': []
,'ComponentType': []
,'paired_with': []
,'match_contains': []
,'directLink_S': []
,'directLink_T': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'apply_contains': [('0', 'N', 'Destination')]
,'indirectLink_S': []
,'backward_link': [('0', 'N', 'Source')]
,'trace_link': [('0', 'N', 'Source')] }
self.CardinalityTable['SwCompToEcuMapping_component']={
'MatchModel': []
,'ApplyModel': []
,'MetaModelElement_S': []
,'MetaModelElement_T': []
,'ECU': []
,'VirtualDevice': []
,'Distributable': []
,'ExecFrame': []
,'Signal': []
,'System': []
,'SystemMapping': []
,'SoftwareComposition': []
,'CompositionType': []
,'ComponentPrototype': []
,'PPortPrototype': []
,'RPortPrototype': []
,'EcuInstance': []
,'SwcToEcuMapping': []
,'SwCompToEcuMapping_component': []
,'PortPrototype': []
,'ComponentType': []
,'paired_with': []
,'match_contains': []
,'directLink_S': []
,'directLink_T': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'apply_contains': [('0', 'N', 'Destination')]
,'indirectLink_S': []
,'backward_link': [('0', 'N', 'Source')]
,'trace_link': [('0', 'N', 'Source')] }
self.CardinalityTable['PortPrototype']={
'MatchModel': []
,'ApplyModel': []
,'MetaModelElement_S': []
,'MetaModelElement_T': []
,'ECU': []
,'VirtualDevice': []
,'Distributable': []
,'ExecFrame': []
,'Signal': []
,'System': []
,'SystemMapping': []
,'SoftwareComposition': []
,'CompositionType': []
,'ComponentPrototype': []
,'PPortPrototype': []
,'RPortPrototype': []
,'EcuInstance': []
,'SwcToEcuMapping': []
,'SwCompToEcuMapping_component': []
,'PortPrototype': []
,'ComponentType': []
,'paired_with': []
,'match_contains': []
,'directLink_S': []
,'directLink_T': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'apply_contains': [('0', 'N', 'Destination')]
,'indirectLink_S': []
,'backward_link': [('0', 'N', 'Source')]
,'trace_link': [('0', 'N', 'Source')] }
self.CardinalityTable['ComponentType']={
'MatchModel': []
,'ApplyModel': []
,'MetaModelElement_S': []
,'MetaModelElement_T': []
,'ECU': []
,'VirtualDevice': []
,'Distributable': []
,'ExecFrame': []
,'Signal': []
,'System': []
,'SystemMapping': []
,'SoftwareComposition': []
,'CompositionType': []
,'ComponentPrototype': []
,'PPortPrototype': []
,'RPortPrototype': []
,'EcuInstance': []
,'SwcToEcuMapping': []
,'SwCompToEcuMapping_component': []
,'PortPrototype': []
,'ComponentType': []
,'paired_with': []
,'match_contains': []
,'directLink_S': []
,'directLink_T': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'apply_contains': [('0', 'N', 'Destination')]
,'indirectLink_S': []
,'backward_link': [('0', 'N', 'Source')]
,'trace_link': [('0', 'N', 'Source')] }
self.CardinalityTable['paired_with']={
'MatchModel': [('1', '1', 'Destination')]
,'ApplyModel': [('1', '1', 'Source')]
,'MetaModelElement_S': []
,'MetaModelElement_T': []
,'ECU': []
,'VirtualDevice': []
,'Distributable': []
,'ExecFrame': []
,'Signal': []
,'System': []
,'SystemMapping': []
,'SoftwareComposition': []
,'CompositionType': []
,'ComponentPrototype': []
,'PPortPrototype': []
,'RPortPrototype': []
,'EcuInstance': []
,'SwcToEcuMapping': []
,'SwCompToEcuMapping_component': []
,'PortPrototype': []
,'ComponentType': []
,'paired_with': []
,'match_contains': []
,'directLink_S': []
,'directLink_T': []
,'apply_contains': []
,'indirectLink_S': []
,'backward_link': []
,'trace_link': [] }
self.CardinalityTable['match_contains']={
'MatchModel': [('0', 'N', 'Destination')]
,'ApplyModel': []
,'MetaModelElement_S': [('0', 'N', 'Source')]
,'MetaModelElement_T': []
,'ECU': [('0', 'N', 'Source')]
,'VirtualDevice': [('0', 'N', 'Source')]
,'Distributable': [('0', 'N', 'Source')]
,'ExecFrame': [('0', 'N', 'Source')]
,'Signal': [('0', 'N', 'Source')]
,'System': []
,'SystemMapping': []
,'SoftwareComposition': []
,'CompositionType': []
,'ComponentPrototype': []
,'PPortPrototype': []
,'RPortPrototype': []
,'EcuInstance': []
,'SwcToEcuMapping': []
,'SwCompToEcuMapping_component': []
,'PortPrototype': []
,'ComponentType': []
,'paired_with': []
,'match_contains': []
,'directLink_S': []
,'directLink_T': []
,'apply_contains': []
,'indirectLink_S': []
,'backward_link': []
,'trace_link': [] }
self.CardinalityTable['directLink_S']={
'MatchModel': []
,'ApplyModel': []
,'MetaModelElement_S': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'MetaModelElement_T': []
,'ECU': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'VirtualDevice': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'Distributable': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'ExecFrame': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'Signal': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'System': []
,'SystemMapping': []
,'SoftwareComposition': []
,'CompositionType': []
,'ComponentPrototype': []
,'PPortPrototype': []
,'RPortPrototype': []
,'EcuInstance': []
,'SwcToEcuMapping': []
,'SwCompToEcuMapping_component': []
,'PortPrototype': []
,'ComponentType': []
,'paired_with': []
,'match_contains': []
,'directLink_S': []
,'directLink_T': []
,'apply_contains': []
,'indirectLink_S': []
,'backward_link': []
,'trace_link': [] }
self.CardinalityTable['directLink_T']={
'MatchModel': []
,'ApplyModel': []
,'MetaModelElement_S': []
,'MetaModelElement_T': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'ECU': []
,'VirtualDevice': []
,'Distributable': []
,'ExecFrame': []
,'Signal': []
,'System': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'SystemMapping': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'SoftwareComposition': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'CompositionType': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'ComponentPrototype': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'PPortPrototype': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'RPortPrototype': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'EcuInstance': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'SwcToEcuMapping': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'SwCompToEcuMapping_component': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'PortPrototype': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'ComponentType': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'paired_with': []
,'match_contains': []
,'directLink_S': []
,'directLink_T': []
,'apply_contains': []
,'indirectLink_S': []
,'backward_link': []
,'trace_link': [] }
self.CardinalityTable['apply_contains']={
'MatchModel': []
,'ApplyModel': [('0', 'N', 'Destination')]
,'MetaModelElement_S': []
,'MetaModelElement_T': [('0', 'N', 'Source')]
,'ECU': []
,'VirtualDevice': []
,'Distributable': []
,'ExecFrame': []
,'Signal': []
,'System': [('0', 'N', 'Source')]
,'SystemMapping': [('0', 'N', 'Source')]
,'SoftwareComposition': [('0', 'N', 'Source')]
,'CompositionType': [('0', 'N', 'Source')]
,'ComponentPrototype': [('0', 'N', 'Source')]
,'PPortPrototype': [('0', 'N', 'Source')]
,'RPortPrototype': [('0', 'N', 'Source')]
,'EcuInstance': [('0', 'N', 'Source')]
,'SwcToEcuMapping': [('0', 'N', 'Source')]
,'SwCompToEcuMapping_component': [('0', 'N', 'Source')]
,'PortPrototype': [('0', 'N', 'Source')]
,'ComponentType': [('0', 'N', 'Source')]
,'paired_with': []
,'match_contains': []
,'directLink_S': []
,'directLink_T': []
,'apply_contains': []
,'indirectLink_S': []
,'backward_link': []
,'trace_link': [] }
self.CardinalityTable['indirectLink_S']={
'MatchModel': []
,'ApplyModel': []
,'MetaModelElement_S': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'MetaModelElement_T': []
,'ECU': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'VirtualDevice': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'Distributable': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'ExecFrame': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'Signal': [('0', 'N', 'Source'), ('0', 'N', 'Destination')]
,'System': []
,'SystemMapping': []
,'SoftwareComposition': []
,'CompositionType': []
,'ComponentPrototype': []
,'PPortPrototype': []
,'RPortPrototype': []
,'EcuInstance': []
,'SwcToEcuMapping': []
,'SwCompToEcuMapping_component': []
,'PortPrototype': []
,'ComponentType': []
,'paired_with': []
,'match_contains': []
,'directLink_S': []
,'directLink_T': []
,'apply_contains': []
,'indirectLink_S': []
,'backward_link': []
,'trace_link': [] }
self.CardinalityTable['backward_link']={
'MatchModel': []
,'ApplyModel': []
,'MetaModelElement_S': [('0', 'N', 'Source')]
,'MetaModelElement_T': [('0', 'N', 'Destination')]
,'ECU': [('0', 'N', 'Source')]
,'VirtualDevice': [('0', 'N', 'Source')]
,'Distributable': [('0', 'N', 'Source')]
,'ExecFrame': [('0', 'N', 'Source')]
,'Signal': [('0', 'N', 'Source')]
,'System': [('0', 'N', 'Destination')]
,'SystemMapping': [('0', 'N', 'Destination')]
,'SoftwareComposition': [('0', 'N', 'Destination')]
,'CompositionType': [('0', 'N', 'Destination')]
,'ComponentPrototype': [('0', 'N', 'Destination')]
,'PPortPrototype': [('0', 'N', 'Destination')]
,'RPortPrototype': [('0', 'N', 'Destination')]
,'EcuInstance': [('0', 'N', 'Destination')]
,'SwcToEcuMapping': [('0', 'N', 'Destination')]
,'SwCompToEcuMapping_component': [('0', 'N', 'Destination')]
,'PortPrototype': [('0', 'N', 'Destination')]
,'ComponentType': [('0', 'N', 'Destination')]
,'paired_with': []
,'match_contains': []
,'directLink_S': []
,'directLink_T': []
,'apply_contains': []
,'indirectLink_S': []
,'backward_link': []
,'trace_link': [] }
self.CardinalityTable['trace_link']={
'MatchModel': []
,'ApplyModel': []
,'MetaModelElement_S': [('0', 'N', 'Source')]
,'MetaModelElement_T': [('0', 'N', 'Destination')]
,'ECU': [('0', 'N', 'Source')]
,'VirtualDevice': [('0', 'N', 'Source')]
,'Distributable': [('0', 'N', 'Source')]
,'ExecFrame': [('0', 'N', 'Source')]
,'Signal': [('0', 'N', 'Source')]
,'System': [('0', 'N', 'Destination')]
,'SystemMapping': [('0', 'N', 'Destination')]
,'SoftwareComposition': [('0', 'N', 'Destination')]
,'CompositionType': [('0', 'N', 'Destination')]
,'ComponentPrototype': [('0', 'N', 'Destination')]
,'PPortPrototype': [('0', 'N', 'Destination')]
,'RPortPrototype': [('0', 'N', 'Destination')]
,'EcuInstance': [('0', 'N', 'Destination')]
,'SwcToEcuMapping': [('0', 'N', 'Destination')]
,'SwCompToEcuMapping_component': [('0', 'N', 'Destination')]
,'PortPrototype': [('0', 'N', 'Destination')]
,'ComponentType': [('0', 'N', 'Destination')]
,'paired_with': []
,'match_contains': []
,'directLink_S': []
,'directLink_T': []
,'apply_contains': []
,'indirectLink_S': []
,'backward_link': []
,'trace_link': [] }
self.entitiesInMetaModel['GM2AUTOSAR_MM']=["MatchModel", "ApplyModel", "MetaModelElement_S", "MetaModelElement_T", "ECU", "VirtualDevice", "Distributable", "ExecFrame", "Signal", "System", "SystemMapping", "SoftwareComposition", "CompositionType", "ComponentPrototype", "PPortPrototype", "RPortPrototype", "EcuInstance", "SwcToEcuMapping", "SwCompToEcuMapping_component", "PortPrototype", "ComponentType", "paired_with", "match_contains", "directLink_S", "directLink_T", "apply_contains", "indirectLink_S", "backward_link", "trace_link"]
def createNewMatchModel(self, wherex, wherey, screenCoordinates = 1):
    """Create a MatchModel node: semantic object plus its canvas graphics.

    wherex/wherey give the drop position; screenCoordinates (default truthy)
    means they are screen coordinates to be translated via the canvas,
    otherwise they are used as canvas coordinates directly.  Returns the new
    semantic object, or None when a constraint check aborts the creation.
    """
    self.fromClass = None
    self.toClass = None
    # Global (ASG-level) creation pre-condition.
    violation = self.ASGroot.preCondition(ASG.CREATE)
    if violation:
        self.constraintViolation(violation)
        self.mode = self.IDLEMODE
        return
    node = MatchModel(self)
    # Node-level pre-condition / pre-action hooks.
    violation = node.preCondition(ASGNode.CREATE)
    if violation:
        return self.constraintViolation(violation)
    node.preAction(ASGNode.CREATE)
    # Append the current node count to the keyword to keep names distinct.
    count = len(self.ASGroot.listNodes["MatchModel"])
    if node.keyword_:
        node.keyword_.setValue(node.keyword_.toString() + str(count))
    # Resolve the drop position into canvas coordinates.
    if screenCoordinates:
        posx = self.UMLmodel.canvasx(wherex)
        posy = self.UMLmodel.canvasy(wherey)
    else:
        posx, posy = wherex, wherey
    widget = graph_MatchModel(posx, posy, node)
    widget.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("MatchModel", widget.tag)
    node.graphObject_ = widget
    self.ASGroot.addNode(node)
    # Post-conditions: global first, then node-level; abort on violation.
    for check in (lambda: self.ASGroot.postCondition(ASG.CREATE),
                  lambda: node.postCondition(ASGNode.CREATE)):
        violation = check()
        if violation:
            self.constraintViolation(violation)
            self.mode = self.IDLEMODE
            return
    node.postAction(ASGNode.CREATE)
    self.mode = self.IDLEMODE
    # Status-bar feedback depends on whether a transformation is being edited.
    kind = StatusBar.TRANSFORMATION if self.editGGLabel else StatusBar.MODEL
    self.statusbar.event(kind, StatusBar.CREATE)
    return node
def createNewApplyModel(self, wherex, wherey, screenCoordinates = 1):
    """Create an ApplyModel node: semantic object plus its canvas graphics.

    wherex/wherey give the drop position; screenCoordinates (default truthy)
    means they are screen coordinates to be translated via the canvas,
    otherwise they are used as canvas coordinates directly.  Returns the new
    semantic object, or None when a constraint check aborts the creation.
    """
    self.fromClass = None
    self.toClass = None
    # Global (ASG-level) creation pre-condition.
    violation = self.ASGroot.preCondition(ASG.CREATE)
    if violation:
        self.constraintViolation(violation)
        self.mode = self.IDLEMODE
        return
    node = ApplyModel(self)
    # Node-level pre-condition / pre-action hooks.
    violation = node.preCondition(ASGNode.CREATE)
    if violation:
        return self.constraintViolation(violation)
    node.preAction(ASGNode.CREATE)
    # Append the current node count to the keyword to keep names distinct.
    count = len(self.ASGroot.listNodes["ApplyModel"])
    if node.keyword_:
        node.keyword_.setValue(node.keyword_.toString() + str(count))
    # Resolve the drop position into canvas coordinates.
    if screenCoordinates:
        posx = self.UMLmodel.canvasx(wherex)
        posy = self.UMLmodel.canvasy(wherey)
    else:
        posx, posy = wherex, wherey
    widget = graph_ApplyModel(posx, posy, node)
    widget.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("ApplyModel", widget.tag)
    node.graphObject_ = widget
    self.ASGroot.addNode(node)
    # Post-conditions: global first, then node-level; abort on violation.
    for check in (lambda: self.ASGroot.postCondition(ASG.CREATE),
                  lambda: node.postCondition(ASGNode.CREATE)):
        violation = check()
        if violation:
            self.constraintViolation(violation)
            self.mode = self.IDLEMODE
            return
    node.postAction(ASGNode.CREATE)
    self.mode = self.IDLEMODE
    # Status-bar feedback depends on whether a transformation is being edited.
    kind = StatusBar.TRANSFORMATION if self.editGGLabel else StatusBar.MODEL
    self.statusbar.event(kind, StatusBar.CREATE)
    return node
def createNewMetaModelElement_S(self, wherex, wherey, screenCoordinates = 1):
    """Create a MetaModelElement_S node: semantic object plus canvas graphics.

    wherex/wherey give the drop position; screenCoordinates (default truthy)
    means they are screen coordinates to be translated via the canvas,
    otherwise they are used as canvas coordinates directly.  Returns the new
    semantic object, or None when a constraint check aborts the creation.
    """
    self.fromClass = None
    self.toClass = None
    # Global (ASG-level) creation pre-condition.
    violation = self.ASGroot.preCondition(ASG.CREATE)
    if violation:
        self.constraintViolation(violation)
        self.mode = self.IDLEMODE
        return
    node = MetaModelElement_S(self)
    # Node-level pre-condition / pre-action hooks.
    violation = node.preCondition(ASGNode.CREATE)
    if violation:
        return self.constraintViolation(violation)
    node.preAction(ASGNode.CREATE)
    # Append the current node count to the keyword to keep names distinct.
    count = len(self.ASGroot.listNodes["MetaModelElement_S"])
    if node.keyword_:
        node.keyword_.setValue(node.keyword_.toString() + str(count))
    # Resolve the drop position into canvas coordinates.
    if screenCoordinates:
        posx = self.UMLmodel.canvasx(wherex)
        posy = self.UMLmodel.canvasy(wherey)
    else:
        posx, posy = wherex, wherey
    widget = graph_MetaModelElement_S(posx, posy, node)
    widget.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("MetaModelElement_S", widget.tag)
    node.graphObject_ = widget
    self.ASGroot.addNode(node)
    # Post-conditions: global first, then node-level; abort on violation.
    for check in (lambda: self.ASGroot.postCondition(ASG.CREATE),
                  lambda: node.postCondition(ASGNode.CREATE)):
        violation = check()
        if violation:
            self.constraintViolation(violation)
            self.mode = self.IDLEMODE
            return
    node.postAction(ASGNode.CREATE)
    self.mode = self.IDLEMODE
    # Status-bar feedback depends on whether a transformation is being edited.
    kind = StatusBar.TRANSFORMATION if self.editGGLabel else StatusBar.MODEL
    self.statusbar.event(kind, StatusBar.CREATE)
    return node
def createNewMetaModelElement_T(self, wherex, wherey, screenCoordinates = 1):
    """Create a MetaModelElement_T node: semantic object plus canvas graphics.

    wherex/wherey give the drop position; screenCoordinates (default truthy)
    means they are screen coordinates to be translated via the canvas,
    otherwise they are used as canvas coordinates directly.  Returns the new
    semantic object, or None when a constraint check aborts the creation.
    """
    self.fromClass = None
    self.toClass = None
    # Global (ASG-level) creation pre-condition.
    violation = self.ASGroot.preCondition(ASG.CREATE)
    if violation:
        self.constraintViolation(violation)
        self.mode = self.IDLEMODE
        return
    node = MetaModelElement_T(self)
    # Node-level pre-condition / pre-action hooks.
    violation = node.preCondition(ASGNode.CREATE)
    if violation:
        return self.constraintViolation(violation)
    node.preAction(ASGNode.CREATE)
    # Append the current node count to the keyword to keep names distinct.
    count = len(self.ASGroot.listNodes["MetaModelElement_T"])
    if node.keyword_:
        node.keyword_.setValue(node.keyword_.toString() + str(count))
    # Resolve the drop position into canvas coordinates.
    if screenCoordinates:
        posx = self.UMLmodel.canvasx(wherex)
        posy = self.UMLmodel.canvasy(wherey)
    else:
        posx, posy = wherex, wherey
    widget = graph_MetaModelElement_T(posx, posy, node)
    widget.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("MetaModelElement_T", widget.tag)
    node.graphObject_ = widget
    self.ASGroot.addNode(node)
    # Post-conditions: global first, then node-level; abort on violation.
    for check in (lambda: self.ASGroot.postCondition(ASG.CREATE),
                  lambda: node.postCondition(ASGNode.CREATE)):
        violation = check()
        if violation:
            self.constraintViolation(violation)
            self.mode = self.IDLEMODE
            return
    node.postAction(ASGNode.CREATE)
    self.mode = self.IDLEMODE
    # Status-bar feedback depends on whether a transformation is being edited.
    kind = StatusBar.TRANSFORMATION if self.editGGLabel else StatusBar.MODEL
    self.statusbar.event(kind, StatusBar.CREATE)
    return node
def createNewECU(self, wherex, wherey, screenCoordinates = 1):
    """Create an ECU node: semantic object plus its canvas graphics.

    wherex/wherey give the drop position; screenCoordinates (default truthy)
    means they are screen coordinates to be translated via the canvas,
    otherwise they are used as canvas coordinates directly.  Returns the new
    semantic object, or None when a constraint check aborts the creation.
    """
    self.fromClass = None
    self.toClass = None
    # Global (ASG-level) creation pre-condition.
    violation = self.ASGroot.preCondition(ASG.CREATE)
    if violation:
        self.constraintViolation(violation)
        self.mode = self.IDLEMODE
        return
    node = ECU(self)
    # Node-level pre-condition / pre-action hooks.
    violation = node.preCondition(ASGNode.CREATE)
    if violation:
        return self.constraintViolation(violation)
    node.preAction(ASGNode.CREATE)
    # Append the current node count to the keyword to keep names distinct.
    count = len(self.ASGroot.listNodes["ECU"])
    if node.keyword_:
        node.keyword_.setValue(node.keyword_.toString() + str(count))
    # Resolve the drop position into canvas coordinates.
    if screenCoordinates:
        posx = self.UMLmodel.canvasx(wherex)
        posy = self.UMLmodel.canvasy(wherey)
    else:
        posx, posy = wherex, wherey
    widget = graph_ECU(posx, posy, node)
    widget.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("ECU", widget.tag)
    node.graphObject_ = widget
    self.ASGroot.addNode(node)
    # Post-conditions: global first, then node-level; abort on violation.
    for check in (lambda: self.ASGroot.postCondition(ASG.CREATE),
                  lambda: node.postCondition(ASGNode.CREATE)):
        violation = check()
        if violation:
            self.constraintViolation(violation)
            self.mode = self.IDLEMODE
            return
    node.postAction(ASGNode.CREATE)
    self.mode = self.IDLEMODE
    # Status-bar feedback depends on whether a transformation is being edited.
    kind = StatusBar.TRANSFORMATION if self.editGGLabel else StatusBar.MODEL
    self.statusbar.event(kind, StatusBar.CREATE)
    return node
def createNewVirtualDevice(self, wherex, wherey, screenCoordinates = 1):
    """Create a VirtualDevice node: semantic object plus its canvas graphics.

    wherex/wherey give the drop position; screenCoordinates (default truthy)
    means they are screen coordinates to be translated via the canvas,
    otherwise they are used as canvas coordinates directly.  Returns the new
    semantic object, or None when a constraint check aborts the creation.
    """
    self.fromClass = None
    self.toClass = None
    # Global (ASG-level) creation pre-condition.
    violation = self.ASGroot.preCondition(ASG.CREATE)
    if violation:
        self.constraintViolation(violation)
        self.mode = self.IDLEMODE
        return
    node = VirtualDevice(self)
    # Node-level pre-condition / pre-action hooks.
    violation = node.preCondition(ASGNode.CREATE)
    if violation:
        return self.constraintViolation(violation)
    node.preAction(ASGNode.CREATE)
    # Append the current node count to the keyword to keep names distinct.
    count = len(self.ASGroot.listNodes["VirtualDevice"])
    if node.keyword_:
        node.keyword_.setValue(node.keyword_.toString() + str(count))
    # Resolve the drop position into canvas coordinates.
    if screenCoordinates:
        posx = self.UMLmodel.canvasx(wherex)
        posy = self.UMLmodel.canvasy(wherey)
    else:
        posx, posy = wherex, wherey
    widget = graph_VirtualDevice(posx, posy, node)
    widget.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("VirtualDevice", widget.tag)
    node.graphObject_ = widget
    self.ASGroot.addNode(node)
    # Post-conditions: global first, then node-level; abort on violation.
    for check in (lambda: self.ASGroot.postCondition(ASG.CREATE),
                  lambda: node.postCondition(ASGNode.CREATE)):
        violation = check()
        if violation:
            self.constraintViolation(violation)
            self.mode = self.IDLEMODE
            return
    node.postAction(ASGNode.CREATE)
    self.mode = self.IDLEMODE
    # Status-bar feedback depends on whether a transformation is being edited.
    kind = StatusBar.TRANSFORMATION if self.editGGLabel else StatusBar.MODEL
    self.statusbar.event(kind, StatusBar.CREATE)
    return node
def createNewDistributable(self, wherex, wherey, screenCoordinates = 1):
    """Create a new Distributable node (semantic + graphical object) at
    (wherex, wherey).

    screenCoordinates -- nonzero (default): wherex/wherey are screen
    coordinates and are converted to canvas coordinates; zero: they are
    already canvas coordinates.

    Returns the new semantic object, or None / the constraintViolation()
    result when a constraint is violated.
    """
    self.fromClass = None
    self.toClass = None
    # Model-wide (global) pre-condition.
    res = self.ASGroot.preCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj = Distributable(self)
    # Node-level pre-condition.
    res = new_semantic_obj.preCondition(ASGNode.CREATE)
    if res:
        violation = self.constraintViolation(res)
        # Fix: also reset the editor mode here, as every other
        # constraint-violation path in this method does.
        self.mode = self.IDLEMODE
        return violation
    new_semantic_obj.preAction(ASGNode.CREATE)
    # Make the keyword unique by appending the current node count.
    ne = len(self.ASGroot.listNodes["Distributable"])
    if new_semantic_obj.keyword_:
        new_semantic_obj.keyword_.setValue(new_semantic_obj.keyword_.toString() + str(ne))
    if screenCoordinates:
        new_obj = graph_Distributable(self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey), new_semantic_obj)
    else:  # already in canvas coordinates
        new_obj = graph_Distributable(wherex, wherey, new_semantic_obj)
    new_obj.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("Distributable", new_obj.tag)
    new_semantic_obj.graphObject_ = new_obj
    self.ASGroot.addNode(new_semantic_obj)
    # Global and node-level post-conditions.
    res = self.ASGroot.postCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    res = new_semantic_obj.postCondition(ASGNode.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj.postAction(ASGNode.CREATE)
    self.mode = self.IDLEMODE
    if self.editGGLabel:
        self.statusbar.event(StatusBar.TRANSFORMATION, StatusBar.CREATE)
    else:
        self.statusbar.event(StatusBar.MODEL, StatusBar.CREATE)
    return new_semantic_obj
def createNewExecFrame(self, wherex, wherey, screenCoordinates = 1):
    """Create a new ExecFrame node (semantic + graphical object) at
    (wherex, wherey).

    screenCoordinates -- nonzero (default): wherex/wherey are screen
    coordinates and are converted to canvas coordinates; zero: they are
    already canvas coordinates.

    Returns the new semantic object, or None / the constraintViolation()
    result when a constraint is violated.
    """
    self.fromClass = None
    self.toClass = None
    # Model-wide (global) pre-condition.
    res = self.ASGroot.preCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj = ExecFrame(self)
    # Node-level pre-condition.
    res = new_semantic_obj.preCondition(ASGNode.CREATE)
    if res:
        violation = self.constraintViolation(res)
        # Fix: also reset the editor mode here, as every other
        # constraint-violation path in this method does.
        self.mode = self.IDLEMODE
        return violation
    new_semantic_obj.preAction(ASGNode.CREATE)
    # Make the keyword unique by appending the current node count.
    ne = len(self.ASGroot.listNodes["ExecFrame"])
    if new_semantic_obj.keyword_:
        new_semantic_obj.keyword_.setValue(new_semantic_obj.keyword_.toString() + str(ne))
    if screenCoordinates:
        new_obj = graph_ExecFrame(self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey), new_semantic_obj)
    else:  # already in canvas coordinates
        new_obj = graph_ExecFrame(wherex, wherey, new_semantic_obj)
    new_obj.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("ExecFrame", new_obj.tag)
    new_semantic_obj.graphObject_ = new_obj
    self.ASGroot.addNode(new_semantic_obj)
    # Global and node-level post-conditions.
    res = self.ASGroot.postCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    res = new_semantic_obj.postCondition(ASGNode.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj.postAction(ASGNode.CREATE)
    self.mode = self.IDLEMODE
    if self.editGGLabel:
        self.statusbar.event(StatusBar.TRANSFORMATION, StatusBar.CREATE)
    else:
        self.statusbar.event(StatusBar.MODEL, StatusBar.CREATE)
    return new_semantic_obj
def createNewSignal(self, wherex, wherey, screenCoordinates = 1):
    """Create a new Signal node (semantic + graphical object) at
    (wherex, wherey).

    screenCoordinates -- nonzero (default): wherex/wherey are screen
    coordinates and are converted to canvas coordinates; zero: they are
    already canvas coordinates.

    Returns the new semantic object, or None / the constraintViolation()
    result when a constraint is violated.
    """
    self.fromClass = None
    self.toClass = None
    # Model-wide (global) pre-condition.
    res = self.ASGroot.preCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj = Signal(self)
    # Node-level pre-condition.
    res = new_semantic_obj.preCondition(ASGNode.CREATE)
    if res:
        violation = self.constraintViolation(res)
        # Fix: also reset the editor mode here, as every other
        # constraint-violation path in this method does.
        self.mode = self.IDLEMODE
        return violation
    new_semantic_obj.preAction(ASGNode.CREATE)
    # Make the keyword unique by appending the current node count.
    ne = len(self.ASGroot.listNodes["Signal"])
    if new_semantic_obj.keyword_:
        new_semantic_obj.keyword_.setValue(new_semantic_obj.keyword_.toString() + str(ne))
    if screenCoordinates:
        new_obj = graph_Signal(self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey), new_semantic_obj)
    else:  # already in canvas coordinates
        new_obj = graph_Signal(wherex, wherey, new_semantic_obj)
    new_obj.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("Signal", new_obj.tag)
    new_semantic_obj.graphObject_ = new_obj
    self.ASGroot.addNode(new_semantic_obj)
    # Global and node-level post-conditions.
    res = self.ASGroot.postCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    res = new_semantic_obj.postCondition(ASGNode.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj.postAction(ASGNode.CREATE)
    self.mode = self.IDLEMODE
    if self.editGGLabel:
        self.statusbar.event(StatusBar.TRANSFORMATION, StatusBar.CREATE)
    else:
        self.statusbar.event(StatusBar.MODEL, StatusBar.CREATE)
    return new_semantic_obj
def createNewSystem(self, wherex, wherey, screenCoordinates = 1):
    """Create a new System node (semantic + graphical object) at
    (wherex, wherey).

    screenCoordinates -- nonzero (default): wherex/wherey are screen
    coordinates and are converted to canvas coordinates; zero: they are
    already canvas coordinates.

    Returns the new semantic object, or None / the constraintViolation()
    result when a constraint is violated.
    """
    self.fromClass = None
    self.toClass = None
    # Model-wide (global) pre-condition.
    res = self.ASGroot.preCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj = System(self)
    # Node-level pre-condition.
    res = new_semantic_obj.preCondition(ASGNode.CREATE)
    if res:
        violation = self.constraintViolation(res)
        # Fix: also reset the editor mode here, as every other
        # constraint-violation path in this method does.
        self.mode = self.IDLEMODE
        return violation
    new_semantic_obj.preAction(ASGNode.CREATE)
    # Make the keyword unique by appending the current node count.
    ne = len(self.ASGroot.listNodes["System"])
    if new_semantic_obj.keyword_:
        new_semantic_obj.keyword_.setValue(new_semantic_obj.keyword_.toString() + str(ne))
    if screenCoordinates:
        new_obj = graph_System(self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey), new_semantic_obj)
    else:  # already in canvas coordinates
        new_obj = graph_System(wherex, wherey, new_semantic_obj)
    new_obj.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("System", new_obj.tag)
    new_semantic_obj.graphObject_ = new_obj
    self.ASGroot.addNode(new_semantic_obj)
    # Global and node-level post-conditions.
    res = self.ASGroot.postCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    res = new_semantic_obj.postCondition(ASGNode.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj.postAction(ASGNode.CREATE)
    self.mode = self.IDLEMODE
    if self.editGGLabel:
        self.statusbar.event(StatusBar.TRANSFORMATION, StatusBar.CREATE)
    else:
        self.statusbar.event(StatusBar.MODEL, StatusBar.CREATE)
    return new_semantic_obj
def createNewSystemMapping(self, wherex, wherey, screenCoordinates = 1):
    """Create a new SystemMapping node (semantic + graphical object) at
    (wherex, wherey).

    screenCoordinates -- nonzero (default): wherex/wherey are screen
    coordinates and are converted to canvas coordinates; zero: they are
    already canvas coordinates.

    Returns the new semantic object, or None / the constraintViolation()
    result when a constraint is violated.
    """
    self.fromClass = None
    self.toClass = None
    # Model-wide (global) pre-condition.
    res = self.ASGroot.preCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj = SystemMapping(self)
    # Node-level pre-condition.
    res = new_semantic_obj.preCondition(ASGNode.CREATE)
    if res:
        violation = self.constraintViolation(res)
        # Fix: also reset the editor mode here, as every other
        # constraint-violation path in this method does.
        self.mode = self.IDLEMODE
        return violation
    new_semantic_obj.preAction(ASGNode.CREATE)
    # Make the keyword unique by appending the current node count.
    ne = len(self.ASGroot.listNodes["SystemMapping"])
    if new_semantic_obj.keyword_:
        new_semantic_obj.keyword_.setValue(new_semantic_obj.keyword_.toString() + str(ne))
    if screenCoordinates:
        new_obj = graph_SystemMapping(self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey), new_semantic_obj)
    else:  # already in canvas coordinates
        new_obj = graph_SystemMapping(wherex, wherey, new_semantic_obj)
    new_obj.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("SystemMapping", new_obj.tag)
    new_semantic_obj.graphObject_ = new_obj
    self.ASGroot.addNode(new_semantic_obj)
    # Global and node-level post-conditions.
    res = self.ASGroot.postCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    res = new_semantic_obj.postCondition(ASGNode.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj.postAction(ASGNode.CREATE)
    self.mode = self.IDLEMODE
    if self.editGGLabel:
        self.statusbar.event(StatusBar.TRANSFORMATION, StatusBar.CREATE)
    else:
        self.statusbar.event(StatusBar.MODEL, StatusBar.CREATE)
    return new_semantic_obj
def createNewSoftwareComposition(self, wherex, wherey, screenCoordinates = 1):
    """Create a new SoftwareComposition node (semantic + graphical object)
    at (wherex, wherey).

    screenCoordinates -- nonzero (default): wherex/wherey are screen
    coordinates and are converted to canvas coordinates; zero: they are
    already canvas coordinates.

    Returns the new semantic object, or None / the constraintViolation()
    result when a constraint is violated.
    """
    self.fromClass = None
    self.toClass = None
    # Model-wide (global) pre-condition.
    res = self.ASGroot.preCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj = SoftwareComposition(self)
    # Node-level pre-condition.
    res = new_semantic_obj.preCondition(ASGNode.CREATE)
    if res:
        violation = self.constraintViolation(res)
        # Fix: also reset the editor mode here, as every other
        # constraint-violation path in this method does.
        self.mode = self.IDLEMODE
        return violation
    new_semantic_obj.preAction(ASGNode.CREATE)
    # Make the keyword unique by appending the current node count.
    ne = len(self.ASGroot.listNodes["SoftwareComposition"])
    if new_semantic_obj.keyword_:
        new_semantic_obj.keyword_.setValue(new_semantic_obj.keyword_.toString() + str(ne))
    if screenCoordinates:
        new_obj = graph_SoftwareComposition(self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey), new_semantic_obj)
    else:  # already in canvas coordinates
        new_obj = graph_SoftwareComposition(wherex, wherey, new_semantic_obj)
    new_obj.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("SoftwareComposition", new_obj.tag)
    new_semantic_obj.graphObject_ = new_obj
    self.ASGroot.addNode(new_semantic_obj)
    # Global and node-level post-conditions.
    res = self.ASGroot.postCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    res = new_semantic_obj.postCondition(ASGNode.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj.postAction(ASGNode.CREATE)
    self.mode = self.IDLEMODE
    if self.editGGLabel:
        self.statusbar.event(StatusBar.TRANSFORMATION, StatusBar.CREATE)
    else:
        self.statusbar.event(StatusBar.MODEL, StatusBar.CREATE)
    return new_semantic_obj
def createNewCompositionType(self, wherex, wherey, screenCoordinates = 1):
    """Create a new CompositionType node (semantic + graphical object) at
    (wherex, wherey).

    screenCoordinates -- nonzero (default): wherex/wherey are screen
    coordinates and are converted to canvas coordinates; zero: they are
    already canvas coordinates.

    Returns the new semantic object, or None / the constraintViolation()
    result when a constraint is violated.
    """
    self.fromClass = None
    self.toClass = None
    # Model-wide (global) pre-condition.
    res = self.ASGroot.preCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj = CompositionType(self)
    # Node-level pre-condition.
    res = new_semantic_obj.preCondition(ASGNode.CREATE)
    if res:
        violation = self.constraintViolation(res)
        # Fix: also reset the editor mode here, as every other
        # constraint-violation path in this method does.
        self.mode = self.IDLEMODE
        return violation
    new_semantic_obj.preAction(ASGNode.CREATE)
    # Make the keyword unique by appending the current node count.
    ne = len(self.ASGroot.listNodes["CompositionType"])
    if new_semantic_obj.keyword_:
        new_semantic_obj.keyword_.setValue(new_semantic_obj.keyword_.toString() + str(ne))
    if screenCoordinates:
        new_obj = graph_CompositionType(self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey), new_semantic_obj)
    else:  # already in canvas coordinates
        new_obj = graph_CompositionType(wherex, wherey, new_semantic_obj)
    new_obj.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("CompositionType", new_obj.tag)
    new_semantic_obj.graphObject_ = new_obj
    self.ASGroot.addNode(new_semantic_obj)
    # Global and node-level post-conditions.
    res = self.ASGroot.postCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    res = new_semantic_obj.postCondition(ASGNode.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj.postAction(ASGNode.CREATE)
    self.mode = self.IDLEMODE
    if self.editGGLabel:
        self.statusbar.event(StatusBar.TRANSFORMATION, StatusBar.CREATE)
    else:
        self.statusbar.event(StatusBar.MODEL, StatusBar.CREATE)
    return new_semantic_obj
def createNewComponentPrototype(self, wherex, wherey, screenCoordinates = 1):
    """Create a new ComponentPrototype node (semantic + graphical object)
    at (wherex, wherey).

    screenCoordinates -- nonzero (default): wherex/wherey are screen
    coordinates and are converted to canvas coordinates; zero: they are
    already canvas coordinates.

    Returns the new semantic object, or None / the constraintViolation()
    result when a constraint is violated.
    """
    self.fromClass = None
    self.toClass = None
    # Model-wide (global) pre-condition.
    res = self.ASGroot.preCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj = ComponentPrototype(self)
    # Node-level pre-condition.
    res = new_semantic_obj.preCondition(ASGNode.CREATE)
    if res:
        violation = self.constraintViolation(res)
        # Fix: also reset the editor mode here, as every other
        # constraint-violation path in this method does.
        self.mode = self.IDLEMODE
        return violation
    new_semantic_obj.preAction(ASGNode.CREATE)
    # Make the keyword unique by appending the current node count.
    ne = len(self.ASGroot.listNodes["ComponentPrototype"])
    if new_semantic_obj.keyword_:
        new_semantic_obj.keyword_.setValue(new_semantic_obj.keyword_.toString() + str(ne))
    if screenCoordinates:
        new_obj = graph_ComponentPrototype(self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey), new_semantic_obj)
    else:  # already in canvas coordinates
        new_obj = graph_ComponentPrototype(wherex, wherey, new_semantic_obj)
    new_obj.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("ComponentPrototype", new_obj.tag)
    new_semantic_obj.graphObject_ = new_obj
    self.ASGroot.addNode(new_semantic_obj)
    # Global and node-level post-conditions.
    res = self.ASGroot.postCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    res = new_semantic_obj.postCondition(ASGNode.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj.postAction(ASGNode.CREATE)
    self.mode = self.IDLEMODE
    if self.editGGLabel:
        self.statusbar.event(StatusBar.TRANSFORMATION, StatusBar.CREATE)
    else:
        self.statusbar.event(StatusBar.MODEL, StatusBar.CREATE)
    return new_semantic_obj
def createNewPPortPrototype(self, wherex, wherey, screenCoordinates = 1):
    """Create a new PPortPrototype node (semantic + graphical object) at
    (wherex, wherey).

    screenCoordinates -- nonzero (default): wherex/wherey are screen
    coordinates and are converted to canvas coordinates; zero: they are
    already canvas coordinates.

    Returns the new semantic object, or None / the constraintViolation()
    result when a constraint is violated.
    """
    self.fromClass = None
    self.toClass = None
    # Model-wide (global) pre-condition.
    res = self.ASGroot.preCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj = PPortPrototype(self)
    # Node-level pre-condition.
    res = new_semantic_obj.preCondition(ASGNode.CREATE)
    if res:
        violation = self.constraintViolation(res)
        # Fix: also reset the editor mode here, as every other
        # constraint-violation path in this method does.
        self.mode = self.IDLEMODE
        return violation
    new_semantic_obj.preAction(ASGNode.CREATE)
    # Make the keyword unique by appending the current node count.
    ne = len(self.ASGroot.listNodes["PPortPrototype"])
    if new_semantic_obj.keyword_:
        new_semantic_obj.keyword_.setValue(new_semantic_obj.keyword_.toString() + str(ne))
    if screenCoordinates:
        new_obj = graph_PPortPrototype(self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey), new_semantic_obj)
    else:  # already in canvas coordinates
        new_obj = graph_PPortPrototype(wherex, wherey, new_semantic_obj)
    new_obj.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("PPortPrototype", new_obj.tag)
    new_semantic_obj.graphObject_ = new_obj
    self.ASGroot.addNode(new_semantic_obj)
    # Global and node-level post-conditions.
    res = self.ASGroot.postCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    res = new_semantic_obj.postCondition(ASGNode.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj.postAction(ASGNode.CREATE)
    self.mode = self.IDLEMODE
    if self.editGGLabel:
        self.statusbar.event(StatusBar.TRANSFORMATION, StatusBar.CREATE)
    else:
        self.statusbar.event(StatusBar.MODEL, StatusBar.CREATE)
    return new_semantic_obj
def createNewRPortPrototype(self, wherex, wherey, screenCoordinates = 1):
    """Create a new RPortPrototype node (semantic + graphical object) at
    (wherex, wherey).

    screenCoordinates -- nonzero (default): wherex/wherey are screen
    coordinates and are converted to canvas coordinates; zero: they are
    already canvas coordinates.

    Returns the new semantic object, or None / the constraintViolation()
    result when a constraint is violated.
    """
    self.fromClass = None
    self.toClass = None
    # Model-wide (global) pre-condition.
    res = self.ASGroot.preCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj = RPortPrototype(self)
    # Node-level pre-condition.
    res = new_semantic_obj.preCondition(ASGNode.CREATE)
    if res:
        violation = self.constraintViolation(res)
        # Fix: also reset the editor mode here, as every other
        # constraint-violation path in this method does.
        self.mode = self.IDLEMODE
        return violation
    new_semantic_obj.preAction(ASGNode.CREATE)
    # Make the keyword unique by appending the current node count.
    ne = len(self.ASGroot.listNodes["RPortPrototype"])
    if new_semantic_obj.keyword_:
        new_semantic_obj.keyword_.setValue(new_semantic_obj.keyword_.toString() + str(ne))
    if screenCoordinates:
        new_obj = graph_RPortPrototype(self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey), new_semantic_obj)
    else:  # already in canvas coordinates
        new_obj = graph_RPortPrototype(wherex, wherey, new_semantic_obj)
    new_obj.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("RPortPrototype", new_obj.tag)
    new_semantic_obj.graphObject_ = new_obj
    self.ASGroot.addNode(new_semantic_obj)
    # Global and node-level post-conditions.
    res = self.ASGroot.postCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    res = new_semantic_obj.postCondition(ASGNode.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj.postAction(ASGNode.CREATE)
    self.mode = self.IDLEMODE
    if self.editGGLabel:
        self.statusbar.event(StatusBar.TRANSFORMATION, StatusBar.CREATE)
    else:
        self.statusbar.event(StatusBar.MODEL, StatusBar.CREATE)
    return new_semantic_obj
def createNewEcuInstance(self, wherex, wherey, screenCoordinates = 1):
    """Create a new EcuInstance node (semantic + graphical object) at
    (wherex, wherey).

    screenCoordinates -- nonzero (default): wherex/wherey are screen
    coordinates and are converted to canvas coordinates; zero: they are
    already canvas coordinates.

    Returns the new semantic object, or None / the constraintViolation()
    result when a constraint is violated.
    """
    self.fromClass = None
    self.toClass = None
    # Model-wide (global) pre-condition.
    res = self.ASGroot.preCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj = EcuInstance(self)
    # Node-level pre-condition.
    res = new_semantic_obj.preCondition(ASGNode.CREATE)
    if res:
        violation = self.constraintViolation(res)
        # Fix: also reset the editor mode here, as every other
        # constraint-violation path in this method does.
        self.mode = self.IDLEMODE
        return violation
    new_semantic_obj.preAction(ASGNode.CREATE)
    # Make the keyword unique by appending the current node count.
    ne = len(self.ASGroot.listNodes["EcuInstance"])
    if new_semantic_obj.keyword_:
        new_semantic_obj.keyword_.setValue(new_semantic_obj.keyword_.toString() + str(ne))
    if screenCoordinates:
        new_obj = graph_EcuInstance(self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey), new_semantic_obj)
    else:  # already in canvas coordinates
        new_obj = graph_EcuInstance(wherex, wherey, new_semantic_obj)
    new_obj.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("EcuInstance", new_obj.tag)
    new_semantic_obj.graphObject_ = new_obj
    self.ASGroot.addNode(new_semantic_obj)
    # Global and node-level post-conditions.
    res = self.ASGroot.postCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    res = new_semantic_obj.postCondition(ASGNode.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj.postAction(ASGNode.CREATE)
    self.mode = self.IDLEMODE
    if self.editGGLabel:
        self.statusbar.event(StatusBar.TRANSFORMATION, StatusBar.CREATE)
    else:
        self.statusbar.event(StatusBar.MODEL, StatusBar.CREATE)
    return new_semantic_obj
def createNewSwcToEcuMapping(self, wherex, wherey, screenCoordinates = 1):
    """Create a new SwcToEcuMapping node (semantic + graphical object) at
    (wherex, wherey).

    screenCoordinates -- nonzero (default): wherex/wherey are screen
    coordinates and are converted to canvas coordinates; zero: they are
    already canvas coordinates.

    Returns the new semantic object, or None / the constraintViolation()
    result when a constraint is violated.
    """
    self.fromClass = None
    self.toClass = None
    # Model-wide (global) pre-condition.
    res = self.ASGroot.preCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj = SwcToEcuMapping(self)
    # Node-level pre-condition.
    res = new_semantic_obj.preCondition(ASGNode.CREATE)
    if res:
        violation = self.constraintViolation(res)
        # Fix: also reset the editor mode here, as every other
        # constraint-violation path in this method does.
        self.mode = self.IDLEMODE
        return violation
    new_semantic_obj.preAction(ASGNode.CREATE)
    # Make the keyword unique by appending the current node count.
    ne = len(self.ASGroot.listNodes["SwcToEcuMapping"])
    if new_semantic_obj.keyword_:
        new_semantic_obj.keyword_.setValue(new_semantic_obj.keyword_.toString() + str(ne))
    if screenCoordinates:
        new_obj = graph_SwcToEcuMapping(self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey), new_semantic_obj)
    else:  # already in canvas coordinates
        new_obj = graph_SwcToEcuMapping(wherex, wherey, new_semantic_obj)
    new_obj.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("SwcToEcuMapping", new_obj.tag)
    new_semantic_obj.graphObject_ = new_obj
    self.ASGroot.addNode(new_semantic_obj)
    # Global and node-level post-conditions.
    res = self.ASGroot.postCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    res = new_semantic_obj.postCondition(ASGNode.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj.postAction(ASGNode.CREATE)
    self.mode = self.IDLEMODE
    if self.editGGLabel:
        self.statusbar.event(StatusBar.TRANSFORMATION, StatusBar.CREATE)
    else:
        self.statusbar.event(StatusBar.MODEL, StatusBar.CREATE)
    return new_semantic_obj
def createNewSwCompToEcuMapping_component(self, wherex, wherey, screenCoordinates = 1):
    """Create a new SwCompToEcuMapping_component node (semantic + graphical
    object) at (wherex, wherey).

    screenCoordinates -- nonzero (default): wherex/wherey are screen
    coordinates and are converted to canvas coordinates; zero: they are
    already canvas coordinates.

    Returns the new semantic object, or None / the constraintViolation()
    result when a constraint is violated.
    """
    self.fromClass = None
    self.toClass = None
    # Model-wide (global) pre-condition.
    res = self.ASGroot.preCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj = SwCompToEcuMapping_component(self)
    # Node-level pre-condition.
    res = new_semantic_obj.preCondition(ASGNode.CREATE)
    if res:
        violation = self.constraintViolation(res)
        # Fix: also reset the editor mode here, as every other
        # constraint-violation path in this method does.
        self.mode = self.IDLEMODE
        return violation
    new_semantic_obj.preAction(ASGNode.CREATE)
    # Make the keyword unique by appending the current node count.
    ne = len(self.ASGroot.listNodes["SwCompToEcuMapping_component"])
    if new_semantic_obj.keyword_:
        new_semantic_obj.keyword_.setValue(new_semantic_obj.keyword_.toString() + str(ne))
    if screenCoordinates:
        new_obj = graph_SwCompToEcuMapping_component(self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey), new_semantic_obj)
    else:  # already in canvas coordinates
        new_obj = graph_SwCompToEcuMapping_component(wherex, wherey, new_semantic_obj)
    new_obj.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("SwCompToEcuMapping_component", new_obj.tag)
    new_semantic_obj.graphObject_ = new_obj
    self.ASGroot.addNode(new_semantic_obj)
    # Global and node-level post-conditions.
    res = self.ASGroot.postCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    res = new_semantic_obj.postCondition(ASGNode.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj.postAction(ASGNode.CREATE)
    self.mode = self.IDLEMODE
    if self.editGGLabel:
        self.statusbar.event(StatusBar.TRANSFORMATION, StatusBar.CREATE)
    else:
        self.statusbar.event(StatusBar.MODEL, StatusBar.CREATE)
    return new_semantic_obj
def createNewPortPrototype(self, wherex, wherey, screenCoordinates = 1):
    """Create a new PortPrototype node (semantic + graphical object) at
    (wherex, wherey).

    screenCoordinates -- nonzero (default): wherex/wherey are screen
    coordinates and are converted to canvas coordinates; zero: they are
    already canvas coordinates.

    Returns the new semantic object, or None / the constraintViolation()
    result when a constraint is violated.
    """
    self.fromClass = None
    self.toClass = None
    # Model-wide (global) pre-condition.
    res = self.ASGroot.preCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj = PortPrototype(self)
    # Node-level pre-condition.
    res = new_semantic_obj.preCondition(ASGNode.CREATE)
    if res:
        violation = self.constraintViolation(res)
        # Fix: also reset the editor mode here, as every other
        # constraint-violation path in this method does.
        self.mode = self.IDLEMODE
        return violation
    new_semantic_obj.preAction(ASGNode.CREATE)
    # Make the keyword unique by appending the current node count.
    ne = len(self.ASGroot.listNodes["PortPrototype"])
    if new_semantic_obj.keyword_:
        new_semantic_obj.keyword_.setValue(new_semantic_obj.keyword_.toString() + str(ne))
    if screenCoordinates:
        new_obj = graph_PortPrototype(self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey), new_semantic_obj)
    else:  # already in canvas coordinates
        new_obj = graph_PortPrototype(wherex, wherey, new_semantic_obj)
    new_obj.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("PortPrototype", new_obj.tag)
    new_semantic_obj.graphObject_ = new_obj
    self.ASGroot.addNode(new_semantic_obj)
    # Global and node-level post-conditions.
    res = self.ASGroot.postCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    res = new_semantic_obj.postCondition(ASGNode.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj.postAction(ASGNode.CREATE)
    self.mode = self.IDLEMODE
    if self.editGGLabel:
        self.statusbar.event(StatusBar.TRANSFORMATION, StatusBar.CREATE)
    else:
        self.statusbar.event(StatusBar.MODEL, StatusBar.CREATE)
    return new_semantic_obj
def createNewComponentType(self, wherex, wherey, screenCoordinates = 1):
    """Create a new ComponentType node (semantic + graphical object) at
    (wherex, wherey).

    screenCoordinates -- nonzero (default): wherex/wherey are screen
    coordinates and are converted to canvas coordinates; zero: they are
    already canvas coordinates.

    Returns the new semantic object, or None / the constraintViolation()
    result when a constraint is violated.
    """
    self.fromClass = None
    self.toClass = None
    # Model-wide (global) pre-condition.
    res = self.ASGroot.preCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj = ComponentType(self)
    # Node-level pre-condition.
    res = new_semantic_obj.preCondition(ASGNode.CREATE)
    if res:
        violation = self.constraintViolation(res)
        # Fix: also reset the editor mode here, as every other
        # constraint-violation path in this method does.
        self.mode = self.IDLEMODE
        return violation
    new_semantic_obj.preAction(ASGNode.CREATE)
    # Make the keyword unique by appending the current node count.
    ne = len(self.ASGroot.listNodes["ComponentType"])
    if new_semantic_obj.keyword_:
        new_semantic_obj.keyword_.setValue(new_semantic_obj.keyword_.toString() + str(ne))
    if screenCoordinates:
        new_obj = graph_ComponentType(self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey), new_semantic_obj)
    else:  # already in canvas coordinates
        new_obj = graph_ComponentType(wherex, wherey, new_semantic_obj)
    new_obj.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("ComponentType", new_obj.tag)
    new_semantic_obj.graphObject_ = new_obj
    self.ASGroot.addNode(new_semantic_obj)
    # Global and node-level post-conditions.
    res = self.ASGroot.postCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    res = new_semantic_obj.postCondition(ASGNode.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj.postAction(ASGNode.CREATE)
    self.mode = self.IDLEMODE
    if self.editGGLabel:
        self.statusbar.event(StatusBar.TRANSFORMATION, StatusBar.CREATE)
    else:
        self.statusbar.event(StatusBar.MODEL, StatusBar.CREATE)
    return new_semantic_obj
def createNewpaired_with(self, wherex, wherey, screenCoordinates = 1):
    """Create a new paired_with node (semantic + graphical object) at
    (wherex, wherey).

    screenCoordinates -- nonzero (default): wherex/wherey are screen
    coordinates and are converted to canvas coordinates; zero: they are
    already canvas coordinates.

    Returns the new semantic object, or None / the constraintViolation()
    result when a constraint is violated.
    """
    self.fromClass = None
    self.toClass = None
    # Model-wide (global) pre-condition.
    res = self.ASGroot.preCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj = paired_with(self)
    # Node-level pre-condition.
    res = new_semantic_obj.preCondition(ASGNode.CREATE)
    if res:
        violation = self.constraintViolation(res)
        # Fix: also reset the editor mode here, as every other
        # constraint-violation path in this method does.
        self.mode = self.IDLEMODE
        return violation
    new_semantic_obj.preAction(ASGNode.CREATE)
    # Make the keyword unique by appending the current node count.
    ne = len(self.ASGroot.listNodes["paired_with"])
    if new_semantic_obj.keyword_:
        new_semantic_obj.keyword_.setValue(new_semantic_obj.keyword_.toString() + str(ne))
    if screenCoordinates:
        new_obj = graph_paired_with(self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey), new_semantic_obj)
    else:  # already in canvas coordinates
        new_obj = graph_paired_with(wherex, wherey, new_semantic_obj)
    new_obj.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("paired_with", new_obj.tag)
    new_semantic_obj.graphObject_ = new_obj
    self.ASGroot.addNode(new_semantic_obj)
    # Global and node-level post-conditions.
    res = self.ASGroot.postCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    res = new_semantic_obj.postCondition(ASGNode.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj.postAction(ASGNode.CREATE)
    self.mode = self.IDLEMODE
    if self.editGGLabel:
        self.statusbar.event(StatusBar.TRANSFORMATION, StatusBar.CREATE)
    else:
        self.statusbar.event(StatusBar.MODEL, StatusBar.CREATE)
    return new_semantic_obj
def createNewmatch_contains(self, wherex, wherey, screenCoordinates = 1):
    """Create a new match_contains node (semantic + graphical object) at
    (wherex, wherey).

    screenCoordinates -- nonzero (default): wherex/wherey are screen
    coordinates and are converted to canvas coordinates; zero: they are
    already canvas coordinates.

    Returns the new semantic object, or None / the constraintViolation()
    result when a constraint is violated.
    """
    self.fromClass = None
    self.toClass = None
    # Model-wide (global) pre-condition.
    res = self.ASGroot.preCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj = match_contains(self)
    # Node-level pre-condition.
    res = new_semantic_obj.preCondition(ASGNode.CREATE)
    if res:
        violation = self.constraintViolation(res)
        # Fix: also reset the editor mode here, as every other
        # constraint-violation path in this method does.
        self.mode = self.IDLEMODE
        return violation
    new_semantic_obj.preAction(ASGNode.CREATE)
    # Make the keyword unique by appending the current node count.
    ne = len(self.ASGroot.listNodes["match_contains"])
    if new_semantic_obj.keyword_:
        new_semantic_obj.keyword_.setValue(new_semantic_obj.keyword_.toString() + str(ne))
    if screenCoordinates:
        new_obj = graph_match_contains(self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey), new_semantic_obj)
    else:  # already in canvas coordinates
        new_obj = graph_match_contains(wherex, wherey, new_semantic_obj)
    new_obj.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("match_contains", new_obj.tag)
    new_semantic_obj.graphObject_ = new_obj
    self.ASGroot.addNode(new_semantic_obj)
    # Global and node-level post-conditions.
    res = self.ASGroot.postCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    res = new_semantic_obj.postCondition(ASGNode.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj.postAction(ASGNode.CREATE)
    self.mode = self.IDLEMODE
    if self.editGGLabel:
        self.statusbar.event(StatusBar.TRANSFORMATION, StatusBar.CREATE)
    else:
        self.statusbar.event(StatusBar.MODEL, StatusBar.CREATE)
    return new_semantic_obj
def createNewdirectLink_S(self, wherex, wherey, screenCoordinates = 1):
    """Create a new directLink_S node (semantic + graphical object) at
    (wherex, wherey).

    screenCoordinates -- nonzero (default): wherex/wherey are screen
    coordinates and are converted to canvas coordinates; zero: they are
    already canvas coordinates.

    Returns the new semantic object, or None / the constraintViolation()
    result when a constraint is violated.
    """
    self.fromClass = None
    self.toClass = None
    # Model-wide (global) pre-condition.
    res = self.ASGroot.preCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj = directLink_S(self)
    # Node-level pre-condition.
    res = new_semantic_obj.preCondition(ASGNode.CREATE)
    if res:
        violation = self.constraintViolation(res)
        # Fix: also reset the editor mode here, as every other
        # constraint-violation path in this method does.
        self.mode = self.IDLEMODE
        return violation
    new_semantic_obj.preAction(ASGNode.CREATE)
    # Make the keyword unique by appending the current node count.
    ne = len(self.ASGroot.listNodes["directLink_S"])
    if new_semantic_obj.keyword_:
        new_semantic_obj.keyword_.setValue(new_semantic_obj.keyword_.toString() + str(ne))
    if screenCoordinates:
        new_obj = graph_directLink_S(self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey), new_semantic_obj)
    else:  # already in canvas coordinates
        new_obj = graph_directLink_S(wherex, wherey, new_semantic_obj)
    new_obj.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("directLink_S", new_obj.tag)
    new_semantic_obj.graphObject_ = new_obj
    self.ASGroot.addNode(new_semantic_obj)
    # Global and node-level post-conditions.
    res = self.ASGroot.postCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    res = new_semantic_obj.postCondition(ASGNode.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj.postAction(ASGNode.CREATE)
    self.mode = self.IDLEMODE
    if self.editGGLabel:
        self.statusbar.event(StatusBar.TRANSFORMATION, StatusBar.CREATE)
    else:
        self.statusbar.event(StatusBar.MODEL, StatusBar.CREATE)
    return new_semantic_obj
def createNewdirectLink_T(self, wherex, wherey, screenCoordinates = 1):
    """Create a new directLink_T node (semantic + graphical object) at
    (wherex, wherey).

    screenCoordinates -- nonzero (default): wherex/wherey are screen
    coordinates and are converted to canvas coordinates; zero: they are
    already canvas coordinates.

    Returns the new semantic object, or None / the constraintViolation()
    result when a constraint is violated.
    """
    self.fromClass = None
    self.toClass = None
    # Model-wide (global) pre-condition.
    res = self.ASGroot.preCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj = directLink_T(self)
    # Node-level pre-condition.
    res = new_semantic_obj.preCondition(ASGNode.CREATE)
    if res:
        violation = self.constraintViolation(res)
        # Fix: also reset the editor mode here, as every other
        # constraint-violation path in this method does.
        self.mode = self.IDLEMODE
        return violation
    new_semantic_obj.preAction(ASGNode.CREATE)
    # Make the keyword unique by appending the current node count.
    ne = len(self.ASGroot.listNodes["directLink_T"])
    if new_semantic_obj.keyword_:
        new_semantic_obj.keyword_.setValue(new_semantic_obj.keyword_.toString() + str(ne))
    if screenCoordinates:
        new_obj = graph_directLink_T(self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey), new_semantic_obj)
    else:  # already in canvas coordinates
        new_obj = graph_directLink_T(wherex, wherey, new_semantic_obj)
    new_obj.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("directLink_T", new_obj.tag)
    new_semantic_obj.graphObject_ = new_obj
    self.ASGroot.addNode(new_semantic_obj)
    # Global and node-level post-conditions.
    res = self.ASGroot.postCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    res = new_semantic_obj.postCondition(ASGNode.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj.postAction(ASGNode.CREATE)
    self.mode = self.IDLEMODE
    if self.editGGLabel:
        self.statusbar.event(StatusBar.TRANSFORMATION, StatusBar.CREATE)
    else:
        self.statusbar.event(StatusBar.MODEL, StatusBar.CREATE)
    return new_semantic_obj
def createNewapply_contains(self, wherex, wherey, screenCoordinates = 1):
    """Create a new apply_contains node (semantic + graphical object) at
    (wherex, wherey).

    screenCoordinates -- nonzero (default): wherex/wherey are screen
    coordinates and are converted to canvas coordinates; zero: they are
    already canvas coordinates.

    Returns the new semantic object, or None / the constraintViolation()
    result when a constraint is violated.
    """
    self.fromClass = None
    self.toClass = None
    # Model-wide (global) pre-condition.
    res = self.ASGroot.preCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj = apply_contains(self)
    # Node-level pre-condition.
    res = new_semantic_obj.preCondition(ASGNode.CREATE)
    if res:
        violation = self.constraintViolation(res)
        # Fix: also reset the editor mode here, as every other
        # constraint-violation path in this method does.
        self.mode = self.IDLEMODE
        return violation
    new_semantic_obj.preAction(ASGNode.CREATE)
    # Make the keyword unique by appending the current node count.
    ne = len(self.ASGroot.listNodes["apply_contains"])
    if new_semantic_obj.keyword_:
        new_semantic_obj.keyword_.setValue(new_semantic_obj.keyword_.toString() + str(ne))
    if screenCoordinates:
        new_obj = graph_apply_contains(self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey), new_semantic_obj)
    else:  # already in canvas coordinates
        new_obj = graph_apply_contains(wherex, wherey, new_semantic_obj)
    new_obj.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("apply_contains", new_obj.tag)
    new_semantic_obj.graphObject_ = new_obj
    self.ASGroot.addNode(new_semantic_obj)
    # Global and node-level post-conditions.
    res = self.ASGroot.postCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    res = new_semantic_obj.postCondition(ASGNode.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj.postAction(ASGNode.CREATE)
    self.mode = self.IDLEMODE
    if self.editGGLabel:
        self.statusbar.event(StatusBar.TRANSFORMATION, StatusBar.CREATE)
    else:
        self.statusbar.event(StatusBar.MODEL, StatusBar.CREATE)
    return new_semantic_obj
def createNewindirectLink_S(self, wherex, wherey, screenCoordinates = 1):
    """Create a new indirectLink_S node (semantic + graphical object) at
    (wherex, wherey).

    screenCoordinates -- nonzero (default): wherex/wherey are screen
    coordinates and are converted to canvas coordinates; zero: they are
    already canvas coordinates.

    Returns the new semantic object, or None / the constraintViolation()
    result when a constraint is violated.
    """
    self.fromClass = None
    self.toClass = None
    # Model-wide (global) pre-condition.
    res = self.ASGroot.preCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj = indirectLink_S(self)
    # Node-level pre-condition.
    res = new_semantic_obj.preCondition(ASGNode.CREATE)
    if res:
        violation = self.constraintViolation(res)
        # Fix: also reset the editor mode here, as every other
        # constraint-violation path in this method does.
        self.mode = self.IDLEMODE
        return violation
    new_semantic_obj.preAction(ASGNode.CREATE)
    # Make the keyword unique by appending the current node count.
    ne = len(self.ASGroot.listNodes["indirectLink_S"])
    if new_semantic_obj.keyword_:
        new_semantic_obj.keyword_.setValue(new_semantic_obj.keyword_.toString() + str(ne))
    if screenCoordinates:
        new_obj = graph_indirectLink_S(self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey), new_semantic_obj)
    else:  # already in canvas coordinates
        new_obj = graph_indirectLink_S(wherex, wherey, new_semantic_obj)
    new_obj.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("indirectLink_S", new_obj.tag)
    new_semantic_obj.graphObject_ = new_obj
    self.ASGroot.addNode(new_semantic_obj)
    # Global and node-level post-conditions.
    res = self.ASGroot.postCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    res = new_semantic_obj.postCondition(ASGNode.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode = self.IDLEMODE
        return
    new_semantic_obj.postAction(ASGNode.CREATE)
    self.mode = self.IDLEMODE
    if self.editGGLabel:
        self.statusbar.event(StatusBar.TRANSFORMATION, StatusBar.CREATE)
    else:
        self.statusbar.event(StatusBar.MODEL, StatusBar.CREATE)
    return new_semantic_obj
def createNewbackward_link(self, wherex, wherey, screenCoordinates = 1):
    """Create a backward_link entity (semantic node plus graphical object).

    Same generated pattern as the other createNew* factories: global and
    node-level pre-checks, instantiation, registration, post-checks.
    Returns the new semantic object, or None on a constraint violation.
    """
    self.fromClass = None
    self.toClass = None
    # try the global constraints...
    res = self.ASGroot.preCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode=self.IDLEMODE
        return
    new_semantic_obj = backward_link(self)
    res = new_semantic_obj.preCondition ( ASGNode.CREATE )
    # NOTE(review): this failure path does not reset self.mode — confirm intended.
    if res: return self.constraintViolation(res)
    new_semantic_obj.preAction ( ASGNode.CREATE )
    # Suffix the keyword attribute with the running count of nodes of this type.
    ne = len(self.ASGroot.listNodes["backward_link"])
    if new_semantic_obj.keyword_:
        new_semantic_obj.keyword_.setValue(new_semantic_obj.keyword_.toString()+str(ne))
    if screenCoordinates:
        # Translate window coordinates into canvas coordinates before drawing.
        new_obj = graph_backward_link(self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey), new_semantic_obj)
    else: # already in canvas coordinates
        new_obj = graph_backward_link(wherex, wherey, new_semantic_obj)
    new_obj.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("backward_link", new_obj.tag)
    new_semantic_obj.graphObject_ = new_obj
    self.ASGroot.addNode(new_semantic_obj)
    # Post-condition checks: on violation, report and bail out (no rollback here).
    res = self.ASGroot.postCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode=self.IDLEMODE
        return
    res = new_semantic_obj.postCondition(ASGNode.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode=self.IDLEMODE
        return
    new_semantic_obj.postAction(ASGNode.CREATE)
    self.mode=self.IDLEMODE
    # Report the creation on the status bar (transformation vs. model editing context).
    if self.editGGLabel :
        self.statusbar.event(StatusBar.TRANSFORMATION, StatusBar.CREATE)
    else:
        self.statusbar.event(StatusBar.MODEL, StatusBar.CREATE)
    return new_semantic_obj
def createNewtrace_link(self, wherex, wherey, screenCoordinates = 1):
    """Create a trace_link entity (semantic node plus graphical object).

    Same generated pattern as the other createNew* factories: global and
    node-level pre-checks, instantiation, registration, post-checks.
    Returns the new semantic object, or None on a constraint violation.
    """
    self.fromClass = None
    self.toClass = None
    # try the global constraints...
    res = self.ASGroot.preCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode=self.IDLEMODE
        return
    new_semantic_obj = trace_link(self)
    res = new_semantic_obj.preCondition ( ASGNode.CREATE )
    # NOTE(review): this failure path does not reset self.mode — confirm intended.
    if res: return self.constraintViolation(res)
    new_semantic_obj.preAction ( ASGNode.CREATE )
    # Suffix the keyword attribute with the running count of nodes of this type.
    ne = len(self.ASGroot.listNodes["trace_link"])
    if new_semantic_obj.keyword_:
        new_semantic_obj.keyword_.setValue(new_semantic_obj.keyword_.toString()+str(ne))
    if screenCoordinates:
        # Translate window coordinates into canvas coordinates before drawing.
        new_obj = graph_trace_link(self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey), new_semantic_obj)
    else: # already in canvas coordinates
        new_obj = graph_trace_link(wherex, wherey, new_semantic_obj)
    new_obj.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("trace_link", new_obj.tag)
    new_semantic_obj.graphObject_ = new_obj
    self.ASGroot.addNode(new_semantic_obj)
    # Post-condition checks: on violation, report and bail out (no rollback here).
    res = self.ASGroot.postCondition(ASG.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode=self.IDLEMODE
        return
    res = new_semantic_obj.postCondition(ASGNode.CREATE)
    if res:
        self.constraintViolation(res)
        self.mode=self.IDLEMODE
        return
    new_semantic_obj.postAction(ASGNode.CREATE)
    self.mode=self.IDLEMODE
    # Report the creation on the status bar (transformation vs. model editing context).
    if self.editGGLabel :
        self.statusbar.event(StatusBar.TRANSFORMATION, StatusBar.CREATE)
    else:
        self.statusbar.event(StatusBar.MODEL, StatusBar.CREATE)
    return new_semantic_obj
def createNew_Model(self, wherex, wherey, screenCoordinates = 1):
    """Create the ASG_GM2AUTOSAR_MM model root node at the given position.

    Unlike the entity factories, no pre/post constraint hooks are run here.
    Returns the new semantic object.
    """
    self.toClass = None
    self.fromClass = None
    new_semantic_obj = ASG_GM2AUTOSAR_MM(self)
    # Suffix the keyword attribute with the running count of nodes of this type.
    ne = len(self.ASGroot.listNodes["ASG_GM2AUTOSAR_MM"])
    if new_semantic_obj.keyword_:
        new_semantic_obj.keyword_.setValue(new_semantic_obj.keyword_.toString()+str(ne))
    if screenCoordinates:
        # Translate window coordinates into canvas coordinates before drawing.
        new_obj = graph_ASG_ERmetaMetaModel(self.UMLmodel.canvasx(wherex), self.UMLmodel.canvasy(wherey), new_semantic_obj)
    else: # already in canvas coordinates
        new_obj = graph_ASG_ERmetaMetaModel(wherex, wherey, new_semantic_obj)
    new_obj.DrawObject(self.UMLmodel, self.editGGLabel)
    self.UMLmodel.addtag_withtag("ASG_GM2AUTOSAR_MM", new_obj.tag)
    new_semantic_obj.graphObject_ = new_obj
    self.ASGroot.addNode(new_semantic_obj)
    self.mode=self.IDLEMODE
    # Report the creation on the status bar (transformation vs. model editing context).
    if self.editGGLabel :
        self.statusbar.event(StatusBar.TRANSFORMATION, StatusBar.CREATE)
    else:
        self.statusbar.event(StatusBar.MODEL, StatusBar.CREATE)
    return new_semantic_obj
def fillTypesInformation(self):
    """Register the ATOM3 attribute types available in this formalism.

    Replaces ~95 lines of copy-pasted builder code with a table-driven
    loop. Each spec is (display name, implementing class name, textual
    constructor parameters); the parameter strings are wrapped in
    ATOM3String objects exactly as the original generated code did, and
    the resulting ATOM3TypeInfo records are stored in self.typeList in
    declaration order.
    """
    # (display name, implementing class, textual constructor parameters)
    type_specs = [
        ("String", "ATOM3String", []),
        ("Boolean", "ATOM3Boolean", ["None", "None", "1"]),
        ("Integer", "ATOM3Integer", []),
        ("Float", "ATOM3Float", []),
        ("Attribute", "ATOM3Attribute", ["self.types"]),
        ("List", "ATOM3List", ["[1,1,1,self.types]", "ATOM3Attribute", "self.types"]),
        ("Enum", "ATOM3Enum", ["[]", "1", "1"]),
        ("Constraint", "ATOM3Constraint", []),
        ("Action", "ATOM3Action", []),
        ("Appearance", "ATOM3Appearance", ["'class0'", "None"]),
        ("BottomType", "ATOM3BottomType", []),
        ("Link", "ATOM3Link", []),
        ("Port", "ATOM3Port", []),
        ("Connection", "ATOM3Connection", []),
        ("MSEnum", "ATOM3MSEnum", ["None", "None", "1"]),
        ("Text", "ATOM3Text", []),
    ]
    objs = []
    for name, class_name, param_strings in type_specs:
        obj = ATOM3TypeInfo(self)
        params = [ATOM3String(s) for s in param_strings]
        # The trailing (None, 0) pair is reproduced verbatim from the
        # generated code; its meaning is not visible from this file.
        obj.setValue((name, class_name, params, (None, 0)))
        objs.append(obj)
    self.typeList.setValue(objs)
| 44.355077
| 721
| 0.626046
| 12,813
| 144,154
| 6.812534
| 0.016234
| 0.048643
| 0.061909
| 0.065827
| 0.934413
| 0.917286
| 0.908098
| 0.905131
| 0.895577
| 0.887363
| 0
| 0.004665
| 0.220785
| 144,154
| 3,249
| 722
| 44.368729
| 0.772432
| 0.014519
| 0
| 0.874717
| 1
| 0
| 0.239569
| 0.013798
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011007
| false
| 0
| 0.011654
| 0.000324
| 0.060861
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
fed6aab404f176c01045ce8b086097e38c2bb092
| 728
|
py
|
Python
|
calculate_anything/calculation/__init__.py
|
friday/ulauncher-albert-calculate-anything
|
65e34ded08a4d88a66ec9fcd29bec41e57b32967
|
[
"MIT"
] | null | null | null |
calculate_anything/calculation/__init__.py
|
friday/ulauncher-albert-calculate-anything
|
65e34ded08a4d88a66ec9fcd29bec41e57b32967
|
[
"MIT"
] | null | null | null |
calculate_anything/calculation/__init__.py
|
friday/ulauncher-albert-calculate-anything
|
65e34ded08a4d88a66ec9fcd29bec41e57b32967
|
[
"MIT"
] | null | null | null |
from calculate_anything.calculation.calculation import Calculation, BooleanCalculation
from calculate_anything.calculation.currency import CurrencyCalculation
from calculate_anything.calculation.time import TimeCalculation, LocationTimeCalculation, TimedeltaCalculation
from calculate_anything.calculation.percentage import PercentageCalculation, NormalPercentageCalculation, InversePercentageCalculation
from calculate_anything.calculation.base_n import (
BaseNCalculation, Base16StringCalculation, Base10Calculation, Base2Calculation, Base8Calculation,
Base16Calculation, ColorBase16Calculation
)
from calculate_anything.calculation.units import UnitsCalculation, CurrencyUnitsCalculation, TemperatureUnitsCalculation
| 72.8
| 134
| 0.898352
| 56
| 728
| 11.553571
| 0.517857
| 0.120556
| 0.194745
| 0.296754
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014684
| 0.06456
| 728
| 9
| 135
| 80.888889
| 0.935389
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 0
| 0
| 1
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
fefffa50e146f4e2ffe2d459462dcc854252978f
| 100
|
py
|
Python
|
swarm_tasks/envs/__init__.py
|
rmvanarse/swarm_tasks
|
3335297ba8fcdbff756ae519002bcce919d54a84
|
[
"MIT"
] | 6
|
2021-03-13T12:54:18.000Z
|
2022-01-29T12:12:28.000Z
|
swarm_tasks/envs/__init__.py
|
rmvanarse/swarm_tasks
|
3335297ba8fcdbff756ae519002bcce919d54a84
|
[
"MIT"
] | null | null | null |
swarm_tasks/envs/__init__.py
|
rmvanarse/swarm_tasks
|
3335297ba8fcdbff756ae519002bcce919d54a84
|
[
"MIT"
] | 2
|
2021-08-06T15:02:15.000Z
|
2022-02-08T12:11:30.000Z
|
import shapely
import swarm_tasks.envs.world
import swarm_tasks.envs.items
import swarm_tasks.utils
| 20
| 29
| 0.87
| 16
| 100
| 5.25
| 0.5
| 0.392857
| 0.571429
| 0.47619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08
| 100
| 4
| 30
| 25
| 0.913043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
28ab36634a7614fc0c7220e8ed2e264d51db0162
| 50,130
|
py
|
Python
|
src/larksuiteoapi/service/contact/v3/api.py
|
VXenomac/oapi-sdk-python
|
156b789b3d20653802f64842c9a26229dd9252d7
|
[
"Apache-2.0"
] | 50
|
2021-04-11T05:24:10.000Z
|
2022-03-29T10:14:13.000Z
|
src/larksuiteoapi/service/contact/v3/api.py
|
larksuite/oapi-sdk-python
|
70fda5b1ccf765938bf207dff0117c0c03a93605
|
[
"Apache-2.0"
] | 20
|
2021-04-07T15:17:44.000Z
|
2022-03-23T06:27:12.000Z
|
src/larksuiteoapi/service/contact/v3/api.py
|
VXenomac/oapi-sdk-python
|
156b789b3d20653802f64842c9a26229dd9252d7
|
[
"Apache-2.0"
] | 8
|
2021-04-25T15:02:17.000Z
|
2022-03-13T15:00:59.000Z
|
# -*- coding: UTF-8 -*-
# Code generated by lark suite oapi sdk gen
from typing import *
from ....api import Request as APIRequest, Response as APIResponse, set_timeout, set_tenant_key, set_user_access_token, set_path_params, \
set_query_params, set_response_stream, set_is_response_stream, FormData, FormDataFile
from ....config import Config
from ....consts import ACCESS_TOKEN_TYPE_TENANT, ACCESS_TOKEN_TYPE_USER, ACCESS_TOKEN_TYPE_APP
from .model import *
class Service(object):
    """Root service for the Contact v3 API.

    Holds the shared Config and exposes one sub-service object per
    resource family (users, departments, scopes, ...).
    """

    def __init__(self, conf):
        # type: (Config) -> None
        self.conf = conf
        # Wire each sub-service; factories are referenced lazily here
        # because their classes are defined later in this module.
        sub_services = (
            ("users", UserService),
            ("departments", DepartmentService),
            ("scopes", ScopeService),
            ("employee_type_enums", EmployeeTypeEnumService),
            ("custom_attrs", CustomAttrService),
            ("groups", GroupService),
            ("group_members", GroupMemberService),
        )
        for attr_name, factory in sub_services:
            setattr(self, attr_name, factory(self))
class UserService(object):
    """Request builders for the Contact v3 user endpoints.

    Each method collects the common per-request options (timeout, tenant
    key and, where supported, user access token) into a request_opts list
    and returns an unsent *ReqCall builder.
    """
    def __init__(self, service):
        # type: (Service) -> None
        self.service = service
    def delete(self, body, tenant_key=None, timeout=None):
        # type: (UserDeleteReqBody, str, int) -> UserDeleteReqCall
        """Build a request to delete a user."""
        request_opts = [] # type: List[Callable[[Any], Any]]
        if timeout is not None:
            request_opts += [set_timeout(timeout)]
        if tenant_key is not None:
            request_opts += [set_tenant_key(tenant_key)]
        return UserDeleteReqCall(self, body, request_opts=request_opts)
    def update(self, body, tenant_key=None, timeout=None):
        # type: (User, str, int) -> UserUpdateReqCall
        """Build a request to fully update a user."""
        request_opts = [] # type: List[Callable[[Any], Any]]
        if timeout is not None:
            request_opts += [set_timeout(timeout)]
        if tenant_key is not None:
            request_opts += [set_tenant_key(tenant_key)]
        return UserUpdateReqCall(self, body, request_opts=request_opts)
    def create(self, body, tenant_key=None, timeout=None):
        # type: (User, str, int) -> UserCreateReqCall
        """Build a request to create a user."""
        request_opts = [] # type: List[Callable[[Any], Any]]
        if timeout is not None:
            request_opts += [set_timeout(timeout)]
        if tenant_key is not None:
            request_opts += [set_tenant_key(tenant_key)]
        return UserCreateReqCall(self, body, request_opts=request_opts)
    def patch(self, body, tenant_key=None, user_access_token=None, timeout=None):
        # type: (User, str, str, int) -> UserPatchReqCall
        """Build a request to partially update a user."""
        request_opts = [] # type: List[Callable[[Any], Any]]
        if timeout is not None:
            request_opts += [set_timeout(timeout)]
        if tenant_key is not None:
            request_opts += [set_tenant_key(tenant_key)]
        if user_access_token is not None:
            request_opts += [set_user_access_token(user_access_token)]
        return UserPatchReqCall(self, body, request_opts=request_opts)
    def get(self, tenant_key=None, user_access_token=None, timeout=None):
        # type: (str, str, int) -> UserGetReqCall
        """Build a request to fetch a single user."""
        request_opts = [] # type: List[Callable[[Any], Any]]
        if timeout is not None:
            request_opts += [set_timeout(timeout)]
        if tenant_key is not None:
            request_opts += [set_tenant_key(tenant_key)]
        if user_access_token is not None:
            request_opts += [set_user_access_token(user_access_token)]
        return UserGetReqCall(self, request_opts=request_opts)
    def list(self, tenant_key=None, user_access_token=None, timeout=None):
        # type: (str, str, int) -> UserListReqCall
        """Build a request to list users."""
        request_opts = [] # type: List[Callable[[Any], Any]]
        if timeout is not None:
            request_opts += [set_timeout(timeout)]
        if tenant_key is not None:
            request_opts += [set_tenant_key(tenant_key)]
        if user_access_token is not None:
            request_opts += [set_user_access_token(user_access_token)]
        return UserListReqCall(self, request_opts=request_opts)
class DepartmentService(object):
    """Request builders for the Contact v3 department endpoints.

    Each method collects the common per-request options (timeout, tenant
    key and, where supported, user access token) into a request_opts list
    and returns an unsent *ReqCall builder.
    """
    def __init__(self, service):
        # type: (Service) -> None
        self.service = service
    def get(self, tenant_key=None, user_access_token=None, timeout=None):
        # type: (str, str, int) -> DepartmentGetReqCall
        """Build a request to fetch a single department."""
        request_opts = [] # type: List[Callable[[Any], Any]]
        if timeout is not None:
            request_opts += [set_timeout(timeout)]
        if tenant_key is not None:
            request_opts += [set_tenant_key(tenant_key)]
        if user_access_token is not None:
            request_opts += [set_user_access_token(user_access_token)]
        return DepartmentGetReqCall(self, request_opts=request_opts)
    def list(self, tenant_key=None, user_access_token=None, timeout=None):
        # type: (str, str, int) -> DepartmentListReqCall
        """Build a request to list departments."""
        request_opts = [] # type: List[Callable[[Any], Any]]
        if timeout is not None:
            request_opts += [set_timeout(timeout)]
        if tenant_key is not None:
            request_opts += [set_tenant_key(tenant_key)]
        if user_access_token is not None:
            request_opts += [set_user_access_token(user_access_token)]
        return DepartmentListReqCall(self, request_opts=request_opts)
    def patch(self, body, tenant_key=None, timeout=None):
        # type: (Department, str, int) -> DepartmentPatchReqCall
        """Build a request to partially update a department."""
        request_opts = [] # type: List[Callable[[Any], Any]]
        if timeout is not None:
            request_opts += [set_timeout(timeout)]
        if tenant_key is not None:
            request_opts += [set_tenant_key(tenant_key)]
        return DepartmentPatchReqCall(self, body, request_opts=request_opts)
    def create(self, body, tenant_key=None, timeout=None):
        # type: (Department, str, int) -> DepartmentCreateReqCall
        """Build a request to create a department."""
        request_opts = [] # type: List[Callable[[Any], Any]]
        if timeout is not None:
            request_opts += [set_timeout(timeout)]
        if tenant_key is not None:
            request_opts += [set_tenant_key(tenant_key)]
        return DepartmentCreateReqCall(self, body, request_opts=request_opts)
    def delete(self, tenant_key=None, timeout=None):
        # type: (str, int) -> DepartmentDeleteReqCall
        """Build a request to delete a department."""
        request_opts = [] # type: List[Callable[[Any], Any]]
        if timeout is not None:
            request_opts += [set_timeout(timeout)]
        if tenant_key is not None:
            request_opts += [set_tenant_key(tenant_key)]
        return DepartmentDeleteReqCall(self, request_opts=request_opts)
    def update(self, body, tenant_key=None, timeout=None):
        # type: (Department, str, int) -> DepartmentUpdateReqCall
        """Build a request to fully update a department."""
        request_opts = [] # type: List[Callable[[Any], Any]]
        if timeout is not None:
            request_opts += [set_timeout(timeout)]
        if tenant_key is not None:
            request_opts += [set_tenant_key(tenant_key)]
        return DepartmentUpdateReqCall(self, body, request_opts=request_opts)
    def parent(self, tenant_key=None, user_access_token=None, timeout=None):
        # type: (str, str, int) -> DepartmentParentReqCall
        """Build a request to fetch a department's parent chain."""
        request_opts = [] # type: List[Callable[[Any], Any]]
        if timeout is not None:
            request_opts += [set_timeout(timeout)]
        if tenant_key is not None:
            request_opts += [set_tenant_key(tenant_key)]
        if user_access_token is not None:
            request_opts += [set_user_access_token(user_access_token)]
        return DepartmentParentReqCall(self, request_opts=request_opts)
    def search(self, body, user_access_token=None, timeout=None):
        # type: (DepartmentSearchReqBody, str, int) -> DepartmentSearchReqCall
        """Build a request to search departments (user token only — no tenant_key option)."""
        request_opts = [] # type: List[Callable[[Any], Any]]
        if timeout is not None:
            request_opts += [set_timeout(timeout)]
        if user_access_token is not None:
            request_opts += [set_user_access_token(user_access_token)]
        return DepartmentSearchReqCall(self, body, request_opts=request_opts)
class ScopeService(object):
    """Sub-service for contact scopes; no operations are generated for it."""
    def __init__(self, service):
        # type: (Service) -> None
        self.service = service
class EmployeeTypeEnumService(object):
    """Request builders for the Contact v3 employee-type-enum endpoints."""
    def __init__(self, service):
        # type: (Service) -> None
        self.service = service
    def list(self, tenant_key=None, timeout=None):
        # type: (str, int) -> EmployeeTypeEnumListReqCall
        """Build a request to list employee type enums."""
        request_opts = [] # type: List[Callable[[Any], Any]]
        if timeout is not None:
            request_opts += [set_timeout(timeout)]
        if tenant_key is not None:
            request_opts += [set_tenant_key(tenant_key)]
        return EmployeeTypeEnumListReqCall(self, request_opts=request_opts)
    def create(self, body, tenant_key=None, timeout=None):
        # type: (EmployeeTypeEnum, str, int) -> EmployeeTypeEnumCreateReqCall
        """Build a request to create an employee type enum."""
        request_opts = [] # type: List[Callable[[Any], Any]]
        if timeout is not None:
            request_opts += [set_timeout(timeout)]
        if tenant_key is not None:
            request_opts += [set_tenant_key(tenant_key)]
        return EmployeeTypeEnumCreateReqCall(self, body, request_opts=request_opts)
    def delete(self, tenant_key=None, timeout=None):
        # type: (str, int) -> EmployeeTypeEnumDeleteReqCall
        """Build a request to delete an employee type enum."""
        request_opts = [] # type: List[Callable[[Any], Any]]
        if timeout is not None:
            request_opts += [set_timeout(timeout)]
        if tenant_key is not None:
            request_opts += [set_tenant_key(tenant_key)]
        return EmployeeTypeEnumDeleteReqCall(self, request_opts=request_opts)
    def update(self, body, tenant_key=None, timeout=None):
        # type: (EmployeeTypeEnum, str, int) -> EmployeeTypeEnumUpdateReqCall
        """Build a request to update an employee type enum."""
        request_opts = [] # type: List[Callable[[Any], Any]]
        if timeout is not None:
            request_opts += [set_timeout(timeout)]
        if tenant_key is not None:
            request_opts += [set_tenant_key(tenant_key)]
        return EmployeeTypeEnumUpdateReqCall(self, body, request_opts=request_opts)
class CustomAttrService(object):
    """Request builders for the Contact v3 custom-attribute endpoints."""

    def __init__(self, service):
        # type: (Service) -> None
        self.service = service

    def list(self, tenant_key=None, timeout=None):
        # type: (str, int) -> CustomAttrListReqCall
        """Build a request to list custom attributes.

        Optional timeout and tenant key are folded into the request
        options forwarded to the returned builder.
        """
        opts = []  # type: List[Callable[[Any], Any]]
        if timeout is not None:
            opts.append(set_timeout(timeout))
        if tenant_key is not None:
            opts.append(set_tenant_key(tenant_key))
        return CustomAttrListReqCall(self, request_opts=opts)
class GroupService(object):
    """Request builders for the Contact v3 user-group endpoints."""
    def __init__(self, service):
        # type: (Service) -> None
        self.service = service
    def create(self, body, tenant_key=None, timeout=None):
        # type: (GroupCreateReqBody, str, int) -> GroupCreateReqCall
        """Build a request to create a group."""
        request_opts = [] # type: List[Callable[[Any], Any]]
        if timeout is not None:
            request_opts += [set_timeout(timeout)]
        if tenant_key is not None:
            request_opts += [set_tenant_key(tenant_key)]
        return GroupCreateReqCall(self, body, request_opts=request_opts)
    def simplelist(self, tenant_key=None, timeout=None):
        # type: (str, int) -> GroupSimplelistReqCall
        """Build a request to list groups (simple form)."""
        request_opts = [] # type: List[Callable[[Any], Any]]
        if timeout is not None:
            request_opts += [set_timeout(timeout)]
        if tenant_key is not None:
            request_opts += [set_tenant_key(tenant_key)]
        return GroupSimplelistReqCall(self, request_opts=request_opts)
    def delete(self, tenant_key=None, timeout=None):
        # type: (str, int) -> GroupDeleteReqCall
        """Build a request to delete a group."""
        request_opts = [] # type: List[Callable[[Any], Any]]
        if timeout is not None:
            request_opts += [set_timeout(timeout)]
        if tenant_key is not None:
            request_opts += [set_tenant_key(tenant_key)]
        return GroupDeleteReqCall(self, request_opts=request_opts)
    def get(self, tenant_key=None, timeout=None):
        # type: (str, int) -> GroupGetReqCall
        """Build a request to fetch a single group."""
        request_opts = [] # type: List[Callable[[Any], Any]]
        if timeout is not None:
            request_opts += [set_timeout(timeout)]
        if tenant_key is not None:
            request_opts += [set_tenant_key(tenant_key)]
        return GroupGetReqCall(self, request_opts=request_opts)
    def patch(self, body, tenant_key=None, timeout=None):
        # type: (GroupPatchReqBody, str, int) -> GroupPatchReqCall
        """Build a request to partially update a group."""
        request_opts = [] # type: List[Callable[[Any], Any]]
        if timeout is not None:
            request_opts += [set_timeout(timeout)]
        if tenant_key is not None:
            request_opts += [set_tenant_key(tenant_key)]
        return GroupPatchReqCall(self, body, request_opts=request_opts)
class GroupMemberService(object):
    """Request builders for the Contact v3 group-member endpoints."""
    def __init__(self, service):
        # type: (Service) -> None
        self.service = service
    def simplelist(self, tenant_key=None, timeout=None):
        # type: (str, int) -> GroupMemberSimplelistReqCall
        """Build a request to list group members (simple form)."""
        request_opts = [] # type: List[Callable[[Any], Any]]
        if timeout is not None:
            request_opts += [set_timeout(timeout)]
        if tenant_key is not None:
            request_opts += [set_tenant_key(tenant_key)]
        return GroupMemberSimplelistReqCall(self, request_opts=request_opts)
    def add(self, body, tenant_key=None, timeout=None):
        # type: (GroupMemberAddReqBody, str, int) -> GroupMemberAddReqCall
        """Build a request to add a member to a group."""
        request_opts = [] # type: List[Callable[[Any], Any]]
        if timeout is not None:
            request_opts += [set_timeout(timeout)]
        if tenant_key is not None:
            request_opts += [set_tenant_key(tenant_key)]
        return GroupMemberAddReqCall(self, body, request_opts=request_opts)
    def remove(self, body, tenant_key=None, timeout=None):
        # type: (GroupMemberRemoveReqBody, str, int) -> GroupMemberRemoveReqCall
        """Build a request to remove a member from a group."""
        request_opts = [] # type: List[Callable[[Any], Any]]
        if timeout is not None:
            request_opts += [set_timeout(timeout)]
        if tenant_key is not None:
            request_opts += [set_tenant_key(tenant_key)]
        return GroupMemberRemoveReqCall(self, body, request_opts=request_opts)
class CustomAttrListReqCall(object):
    """Fluent builder for GET /open-apis/contact/v3/custom_attrs."""

    def __init__(self, service, request_opts=None):
        # type: (CustomAttrService, List[Any]) -> None
        self.service = service
        self.query_params = {}  # type: Dict[str, Any]
        # Keep the caller-supplied options list when non-empty, else start fresh.
        self.request_opts = request_opts if request_opts else []  # type: List[Any]

    def set_page_size(self, page_size):
        # type: (int) -> CustomAttrListReqCall
        """Set the page_size query parameter."""
        self.query_params['page_size'] = page_size
        return self

    def set_page_token(self, page_token):
        # type: (str) -> CustomAttrListReqCall
        """Set the page_token query parameter."""
        self.query_params['page_token'] = page_token
        return self

    def do(self):
        # type: () -> APIResponse[Type[CustomAttrListResult]]
        """Send the request with a tenant access token and return the parsed response."""
        conf = self.service.service.conf
        self.request_opts.append(set_query_params(self.query_params))
        req = APIRequest('/open-apis/contact/v3/custom_attrs', 'GET', [ACCESS_TOKEN_TYPE_TENANT],
                         None, output_class=CustomAttrListResult, request_opts=self.request_opts)
        return req.do(conf)
class DepartmentGetReqCall(object):
    """Fluent builder for GET /open-apis/contact/v3/departments/:department_id."""

    def __init__(self, service, request_opts=None):
        # type: (DepartmentService, List[Any]) -> None
        self.service = service
        self.path_params = {}  # type: Dict[str, Any]
        self.query_params = {}  # type: Dict[str, Any]
        # Keep the caller-supplied options list when non-empty, else start fresh.
        self.request_opts = request_opts if request_opts else []  # type: List[Any]

    def set_department_id(self, department_id):
        # type: (str) -> DepartmentGetReqCall
        """Set the department_id path parameter."""
        self.path_params['department_id'] = department_id
        return self

    def set_user_id_type(self, user_id_type):
        # type: (str) -> DepartmentGetReqCall
        """Set the user_id_type query parameter."""
        self.query_params['user_id_type'] = user_id_type
        return self

    def set_department_id_type(self, department_id_type):
        # type: (str) -> DepartmentGetReqCall
        """Set the department_id_type query parameter."""
        self.query_params['department_id_type'] = department_id_type
        return self

    def do(self):
        # type: () -> APIResponse[Type[DepartmentGetResult]]
        """Send the request (tenant or user access token) and return the parsed response."""
        conf = self.service.service.conf
        self.request_opts.append(set_path_params(self.path_params))
        self.request_opts.append(set_query_params(self.query_params))
        req = APIRequest('/open-apis/contact/v3/departments/:department_id', 'GET',
                         [ACCESS_TOKEN_TYPE_TENANT, ACCESS_TOKEN_TYPE_USER],
                         None, output_class=DepartmentGetResult, request_opts=self.request_opts)
        return req.do(conf)
class DepartmentListReqCall(object):
    """Fluent builder for GET /open-apis/contact/v3/departments (list departments)."""
    def __init__(self, service, request_opts=None):
        # type: (DepartmentService, List[Any]) -> None
        self.service = service
        self.query_params = {} # type: Dict[str, Any]
        # Reuse the caller-supplied options list when non-empty; otherwise start empty.
        if request_opts:
            self.request_opts = request_opts
        else:
            self.request_opts = [] # type: List[Any]
    def set_user_id_type(self, user_id_type):
        # type: (str) -> DepartmentListReqCall
        """Set the user_id_type query parameter."""
        self.query_params['user_id_type'] = user_id_type
        return self
    def set_department_id_type(self, department_id_type):
        # type: (str) -> DepartmentListReqCall
        """Set the department_id_type query parameter."""
        self.query_params['department_id_type'] = department_id_type
        return self
    def set_parent_department_id(self, parent_department_id):
        # type: (str) -> DepartmentListReqCall
        """Set the parent_department_id query parameter."""
        self.query_params['parent_department_id'] = parent_department_id
        return self
    def set_fetch_child(self, fetch_child):
        # type: (bool) -> DepartmentListReqCall
        """Set the fetch_child query parameter."""
        self.query_params['fetch_child'] = fetch_child
        return self
    def set_page_token(self, page_token):
        # type: (str) -> DepartmentListReqCall
        """Set the page_token query parameter."""
        self.query_params['page_token'] = page_token
        return self
    def set_page_size(self, page_size):
        # type: (int) -> DepartmentListReqCall
        """Set the page_size query parameter."""
        self.query_params['page_size'] = page_size
        return self
    def do(self):
        # type: () -> APIResponse[Type[DepartmentListResult]]
        """Send the request (tenant or user access token) and return the parsed response."""
        root_service = self.service.service
        conf = root_service.conf
        self.request_opts += [set_query_params(self.query_params)]
        req = APIRequest('/open-apis/contact/v3/departments', 'GET', [ACCESS_TOKEN_TYPE_TENANT, ACCESS_TOKEN_TYPE_USER],
                         None, output_class=DepartmentListResult, request_opts=self.request_opts)
        resp = req.do(conf)
        return resp
class DepartmentPatchReqCall(object):
    """Fluent builder for PATCH /open-apis/contact/v3/departments/:department_id."""
    def __init__(self, service, body, request_opts=None):
        # type: (DepartmentService, Department, List[Any]) -> None
        self.service = service
        self.body = body
        self.path_params = {} # type: Dict[str, Any]
        self.query_params = {} # type: Dict[str, Any]
        # Reuse the caller-supplied options list when non-empty; otherwise start empty.
        if request_opts:
            self.request_opts = request_opts
        else:
            self.request_opts = [] # type: List[Any]
    def set_department_id(self, department_id):
        # type: (str) -> DepartmentPatchReqCall
        """Set the department_id path parameter."""
        self.path_params['department_id'] = department_id
        return self
    def set_user_id_type(self, user_id_type):
        # type: (str) -> DepartmentPatchReqCall
        """Set the user_id_type query parameter."""
        self.query_params['user_id_type'] = user_id_type
        return self
    def set_department_id_type(self, department_id_type):
        # type: (str) -> DepartmentPatchReqCall
        """Set the department_id_type query parameter."""
        self.query_params['department_id_type'] = department_id_type
        return self
    def do(self):
        # type: () -> APIResponse[Type[DepartmentPatchResult]]
        """Send the request (tenant access token) and return the parsed response."""
        root_service = self.service.service
        conf = root_service.conf
        self.request_opts += [set_path_params(self.path_params)]
        self.request_opts += [set_query_params(self.query_params)]
        req = APIRequest('/open-apis/contact/v3/departments/:department_id', 'PATCH', [ACCESS_TOKEN_TYPE_TENANT],
                         self.body, output_class=DepartmentPatchResult, request_opts=self.request_opts)
        resp = req.do(conf)
        return resp
class DepartmentCreateReqCall(object):
    """Fluent builder for POST /open-apis/contact/v3/departments (create department)."""
    def __init__(self, service, body, request_opts=None):
        # type: (DepartmentService, Department, List[Any]) -> None
        self.service = service
        self.body = body
        self.query_params = {} # type: Dict[str, Any]
        # Reuse the caller-supplied options list when non-empty; otherwise start empty.
        if request_opts:
            self.request_opts = request_opts
        else:
            self.request_opts = [] # type: List[Any]
    def set_user_id_type(self, user_id_type):
        # type: (str) -> DepartmentCreateReqCall
        """Set the user_id_type query parameter."""
        self.query_params['user_id_type'] = user_id_type
        return self
    def set_department_id_type(self, department_id_type):
        # type: (str) -> DepartmentCreateReqCall
        """Set the department_id_type query parameter."""
        self.query_params['department_id_type'] = department_id_type
        return self
    def set_client_token(self, client_token):
        # type: (str) -> DepartmentCreateReqCall
        """Set the client_token query parameter."""
        self.query_params['client_token'] = client_token
        return self
    def do(self):
        # type: () -> APIResponse[Type[DepartmentCreateResult]]
        """Send the request (tenant access token) and return the parsed response."""
        root_service = self.service.service
        conf = root_service.conf
        self.request_opts += [set_query_params(self.query_params)]
        req = APIRequest('/open-apis/contact/v3/departments', 'POST', [ACCESS_TOKEN_TYPE_TENANT],
                         self.body, output_class=DepartmentCreateResult, request_opts=self.request_opts)
        resp = req.do(conf)
        return resp
class DepartmentDeleteReqCall(object):
    """Fluent builder for DELETE /open-apis/contact/v3/departments/:department_id."""

    def __init__(self, service, request_opts=None):
        # type: (DepartmentService, List[Any]) -> None
        self.service = service
        self.path_params = {}  # type: Dict[str, Any]
        self.query_params = {}  # type: Dict[str, Any]
        # Keep the caller-supplied options list when non-empty, else start fresh.
        self.request_opts = request_opts if request_opts else []  # type: List[Any]

    def set_department_id(self, department_id):
        # type: (str) -> DepartmentDeleteReqCall
        """Set the department_id path parameter."""
        self.path_params['department_id'] = department_id
        return self

    def set_department_id_type(self, department_id_type):
        # type: (str) -> DepartmentDeleteReqCall
        """Set the department_id_type query parameter."""
        self.query_params['department_id_type'] = department_id_type
        return self

    def do(self):
        # type: () -> APIResponse[Type[None]]
        """Send the request (tenant access token) and return the response."""
        conf = self.service.service.conf
        self.request_opts.append(set_path_params(self.path_params))
        self.request_opts.append(set_query_params(self.query_params))
        req = APIRequest('/open-apis/contact/v3/departments/:department_id', 'DELETE',
                         [ACCESS_TOKEN_TYPE_TENANT],
                         None, request_opts=self.request_opts)
        return req.do(conf)
class DepartmentUpdateReqCall(object):
    """Fluent builder for PUT /open-apis/contact/v3/departments/:department_id."""
    def __init__(self, service, body, request_opts=None):
        # type: (DepartmentService, Department, List[Any]) -> None
        self.service = service
        self.body = body
        self.path_params = {} # type: Dict[str, Any]
        self.query_params = {} # type: Dict[str, Any]
        # Reuse the caller-supplied options list when non-empty; otherwise start empty.
        if request_opts:
            self.request_opts = request_opts
        else:
            self.request_opts = [] # type: List[Any]
    def set_department_id(self, department_id):
        # type: (str) -> DepartmentUpdateReqCall
        """Set the department_id path parameter."""
        self.path_params['department_id'] = department_id
        return self
    def set_user_id_type(self, user_id_type):
        # type: (str) -> DepartmentUpdateReqCall
        """Set the user_id_type query parameter."""
        self.query_params['user_id_type'] = user_id_type
        return self
    def set_department_id_type(self, department_id_type):
        # type: (str) -> DepartmentUpdateReqCall
        """Set the department_id_type query parameter."""
        self.query_params['department_id_type'] = department_id_type
        return self
    def do(self):
        # type: () -> APIResponse[Type[DepartmentUpdateResult]]
        """Send the request (tenant access token) and return the parsed response."""
        root_service = self.service.service
        conf = root_service.conf
        self.request_opts += [set_path_params(self.path_params)]
        self.request_opts += [set_query_params(self.query_params)]
        req = APIRequest('/open-apis/contact/v3/departments/:department_id', 'PUT', [ACCESS_TOKEN_TYPE_TENANT],
                         self.body, output_class=DepartmentUpdateResult, request_opts=self.request_opts)
        resp = req.do(conf)
        return resp
class DepartmentParentReqCall(object):
    """Fluent builder for GET /open-apis/contact/v3/departments/parent."""
    def __init__(self, service, request_opts=None):
        # type: (DepartmentService, List[Any]) -> None
        self.service = service
        self.query_params = {} # type: Dict[str, Any]
        # Reuse the caller-supplied options list when non-empty; otherwise start empty.
        if request_opts:
            self.request_opts = request_opts
        else:
            self.request_opts = [] # type: List[Any]
    def set_user_id_type(self, user_id_type):
        # type: (str) -> DepartmentParentReqCall
        """Set the user_id_type query parameter."""
        self.query_params['user_id_type'] = user_id_type
        return self
    def set_department_id_type(self, department_id_type):
        # type: (str) -> DepartmentParentReqCall
        """Set the department_id_type query parameter."""
        self.query_params['department_id_type'] = department_id_type
        return self
    def set_department_id(self, department_id):
        # type: (str) -> DepartmentParentReqCall
        """Set the department_id query parameter (query here, not a path parameter)."""
        self.query_params['department_id'] = department_id
        return self
    def set_page_token(self, page_token):
        # type: (str) -> DepartmentParentReqCall
        """Set the page_token query parameter."""
        self.query_params['page_token'] = page_token
        return self
    def set_page_size(self, page_size):
        # type: (int) -> DepartmentParentReqCall
        """Set the page_size query parameter."""
        self.query_params['page_size'] = page_size
        return self
    def do(self):
        # type: () -> APIResponse[Type[DepartmentParentResult]]
        """Send the request (tenant or user access token) and return the parsed response."""
        root_service = self.service.service
        conf = root_service.conf
        self.request_opts += [set_query_params(self.query_params)]
        req = APIRequest('/open-apis/contact/v3/departments/parent', 'GET', [ACCESS_TOKEN_TYPE_TENANT, ACCESS_TOKEN_TYPE_USER],
                         None, output_class=DepartmentParentResult, request_opts=self.request_opts)
        resp = req.do(conf)
        return resp
class DepartmentSearchReqCall(object):
    """Builder for POST /open-apis/contact/v3/departments/search."""

    def __init__(self, service, body, request_opts=None):
        # type: (DepartmentService, DepartmentSearchReqBody, List[Any]) -> None
        self.service = service
        self.body = body
        self.query_params = {}  # type: Dict[str, Any]
        if request_opts:
            self.request_opts = request_opts
        else:
            self.request_opts = []  # type: List[Any]

    def set_user_id_type(self, user_id_type):
        # type: (str) -> DepartmentSearchReqCall
        self.query_params['user_id_type'] = user_id_type
        return self

    def set_department_id_type(self, department_id_type):
        # type: (str) -> DepartmentSearchReqCall
        self.query_params['department_id_type'] = department_id_type
        return self

    def set_page_token(self, page_token):
        # type: (str) -> DepartmentSearchReqCall
        self.query_params['page_token'] = page_token
        return self

    def set_page_size(self, page_size):
        # type: (int) -> DepartmentSearchReqCall
        self.query_params['page_size'] = page_size
        return self

    def do(self):
        # type: () -> APIResponse[Type[DepartmentSearchResult]]
        """Execute the request and return the parsed API response."""
        conf = self.service.service.conf
        # Local option list avoids duplicating query params when do()
        # is called more than once (the generated code mutated
        # self.request_opts in place).
        opts = self.request_opts + [set_query_params(self.query_params)]
        req = APIRequest('/open-apis/contact/v3/departments/search', 'POST',
                         [ACCESS_TOKEN_TYPE_USER], self.body,
                         output_class=DepartmentSearchResult,
                         request_opts=opts)
        return req.do(conf)
class EmployeeTypeEnumListReqCall(object):
    """Builder for GET /open-apis/contact/v3/employee_type_enums."""

    def __init__(self, service, request_opts=None):
        # type: (EmployeeTypeEnumService, List[Any]) -> None
        self.service = service
        self.query_params = {}  # type: Dict[str, Any]
        if request_opts:
            self.request_opts = request_opts
        else:
            self.request_opts = []  # type: List[Any]

    def set_page_token(self, page_token):
        # type: (str) -> EmployeeTypeEnumListReqCall
        self.query_params['page_token'] = page_token
        return self

    def set_page_size(self, page_size):
        # type: (int) -> EmployeeTypeEnumListReqCall
        self.query_params['page_size'] = page_size
        return self

    def do(self):
        # type: () -> APIResponse[Type[EmployeeTypeEnumListResult]]
        """Execute the request and return the parsed API response."""
        conf = self.service.service.conf
        # Local option list: prevents duplicate query-param options on
        # repeated do() calls (the generated code grew self.request_opts).
        opts = self.request_opts + [set_query_params(self.query_params)]
        req = APIRequest('/open-apis/contact/v3/employee_type_enums', 'GET',
                         [ACCESS_TOKEN_TYPE_TENANT], None,
                         output_class=EmployeeTypeEnumListResult,
                         request_opts=opts)
        return req.do(conf)
class EmployeeTypeEnumCreateReqCall(object):
    """Builder for POST /open-apis/contact/v3/employee_type_enums."""

    def __init__(self, service, body, request_opts=None):
        # type: (EmployeeTypeEnumService, EmployeeTypeEnum, List[Any]) -> None
        self.service = service
        self.body = body
        self.request_opts = request_opts if request_opts else []  # type: List[Any]

    def do(self):
        # type: () -> APIResponse[Type[EmployeeTypeEnumCreateResult]]
        """Execute the request and return the parsed API response."""
        conf = self.service.service.conf
        req = APIRequest('/open-apis/contact/v3/employee_type_enums', 'POST',
                         [ACCESS_TOKEN_TYPE_TENANT], self.body,
                         output_class=EmployeeTypeEnumCreateResult,
                         request_opts=self.request_opts)
        return req.do(conf)
class EmployeeTypeEnumDeleteReqCall(object):
    """Builder for DELETE /open-apis/contact/v3/employee_type_enums/:enum_id."""

    def __init__(self, service, request_opts=None):
        # type: (EmployeeTypeEnumService, List[Any]) -> None
        self.service = service
        self.path_params = {}  # type: Dict[str, Any]
        if request_opts:
            self.request_opts = request_opts
        else:
            self.request_opts = []  # type: List[Any]

    def set_enum_id(self, enum_id):
        # type: (str) -> EmployeeTypeEnumDeleteReqCall
        self.path_params['enum_id'] = enum_id
        return self

    def do(self):
        # type: () -> APIResponse[Type[None]]
        """Execute the request and return the parsed API response."""
        conf = self.service.service.conf
        # Local option list: prevents duplicate path-param options on
        # repeated do() calls (the generated code grew self.request_opts).
        opts = self.request_opts + [set_path_params(self.path_params)]
        req = APIRequest('/open-apis/contact/v3/employee_type_enums/:enum_id',
                         'DELETE', [ACCESS_TOKEN_TYPE_TENANT], None,
                         request_opts=opts)
        return req.do(conf)
class EmployeeTypeEnumUpdateReqCall(object):
    """Builder for PUT /open-apis/contact/v3/employee_type_enums/:enum_id."""

    def __init__(self, service, body, request_opts=None):
        # type: (EmployeeTypeEnumService, EmployeeTypeEnum, List[Any]) -> None
        self.service = service
        self.body = body
        self.path_params = {}  # type: Dict[str, Any]
        if request_opts:
            self.request_opts = request_opts
        else:
            self.request_opts = []  # type: List[Any]

    def set_enum_id(self, enum_id):
        # type: (str) -> EmployeeTypeEnumUpdateReqCall
        self.path_params['enum_id'] = enum_id
        return self

    def do(self):
        # type: () -> APIResponse[Type[EmployeeTypeEnumUpdateResult]]
        """Execute the request and return the parsed API response."""
        conf = self.service.service.conf
        # Local option list: prevents duplicate path-param options on
        # repeated do() calls (the generated code grew self.request_opts).
        opts = self.request_opts + [set_path_params(self.path_params)]
        req = APIRequest('/open-apis/contact/v3/employee_type_enums/:enum_id',
                         'PUT', [ACCESS_TOKEN_TYPE_TENANT], self.body,
                         output_class=EmployeeTypeEnumUpdateResult,
                         request_opts=opts)
        return req.do(conf)
class GroupCreateReqCall(object):
    """Builder for POST /open-apis/contact/v3/group."""

    def __init__(self, service, body, request_opts=None):
        # type: (GroupService, GroupCreateReqBody, List[Any]) -> None
        self.service = service
        self.body = body
        self.request_opts = request_opts if request_opts else []  # type: List[Any]

    def do(self):
        # type: () -> APIResponse[Type[GroupCreateResult]]
        """Execute the request and return the parsed API response."""
        conf = self.service.service.conf
        req = APIRequest('/open-apis/contact/v3/group', 'POST',
                         [ACCESS_TOKEN_TYPE_TENANT], self.body,
                         output_class=GroupCreateResult,
                         request_opts=self.request_opts)
        return req.do(conf)
class GroupSimplelistReqCall(object):
    """Builder for GET /open-apis/contact/v3/group/simplelist."""

    def __init__(self, service, request_opts=None):
        # type: (GroupService, List[Any]) -> None
        self.service = service
        self.query_params = {}  # type: Dict[str, Any]
        if request_opts:
            self.request_opts = request_opts
        else:
            self.request_opts = []  # type: List[Any]

    def set_page_size(self, page_size):
        # type: (int) -> GroupSimplelistReqCall
        self.query_params['page_size'] = page_size
        return self

    def set_page_token(self, page_token):
        # type: (str) -> GroupSimplelistReqCall
        self.query_params['page_token'] = page_token
        return self

    def set_type(self, type):
        # type: (int) -> GroupSimplelistReqCall
        # NOTE: parameter name shadows the builtin `type`, but it is kept
        # for backward compatibility with keyword callers.
        self.query_params['type'] = type
        return self

    def do(self):
        # type: () -> APIResponse[Type[GroupSimplelistResult]]
        """Execute the request and return the parsed API response."""
        conf = self.service.service.conf
        # Local option list: prevents duplicate query-param options on
        # repeated do() calls (the generated code grew self.request_opts).
        opts = self.request_opts + [set_query_params(self.query_params)]
        req = APIRequest('/open-apis/contact/v3/group/simplelist', 'GET',
                         [ACCESS_TOKEN_TYPE_TENANT], None,
                         output_class=GroupSimplelistResult,
                         request_opts=opts)
        return req.do(conf)
class GroupDeleteReqCall(object):
    """Builder for DELETE /open-apis/contact/v3/group/:group_id."""

    def __init__(self, service, request_opts=None):
        # type: (GroupService, List[Any]) -> None
        self.service = service
        self.path_params = {}  # type: Dict[str, Any]
        if request_opts:
            self.request_opts = request_opts
        else:
            self.request_opts = []  # type: List[Any]

    def set_group_id(self, group_id):
        # type: (str) -> GroupDeleteReqCall
        self.path_params['group_id'] = group_id
        return self

    def do(self):
        # type: () -> APIResponse[Type[None]]
        """Execute the request and return the parsed API response."""
        conf = self.service.service.conf
        # Local option list: prevents duplicate path-param options on
        # repeated do() calls (the generated code grew self.request_opts).
        opts = self.request_opts + [set_path_params(self.path_params)]
        req = APIRequest('/open-apis/contact/v3/group/:group_id', 'DELETE',
                         [ACCESS_TOKEN_TYPE_TENANT], None,
                         request_opts=opts)
        return req.do(conf)
class GroupGetReqCall(object):
    """Builder for GET /open-apis/contact/v3/group/:group_id."""

    def __init__(self, service, request_opts=None):
        # type: (GroupService, List[Any]) -> None
        self.service = service
        self.path_params = {}  # type: Dict[str, Any]
        if request_opts:
            self.request_opts = request_opts
        else:
            self.request_opts = []  # type: List[Any]

    def set_group_id(self, group_id):
        # type: (str) -> GroupGetReqCall
        self.path_params['group_id'] = group_id
        return self

    def do(self):
        # type: () -> APIResponse[Type[GroupGetResult]]
        """Execute the request and return the parsed API response."""
        conf = self.service.service.conf
        # Local option list: prevents duplicate path-param options on
        # repeated do() calls (the generated code grew self.request_opts).
        opts = self.request_opts + [set_path_params(self.path_params)]
        req = APIRequest('/open-apis/contact/v3/group/:group_id', 'GET',
                         [ACCESS_TOKEN_TYPE_TENANT], None,
                         output_class=GroupGetResult, request_opts=opts)
        return req.do(conf)
class GroupPatchReqCall(object):
    """Builder for PATCH /open-apis/contact/v3/group/:group_id."""

    def __init__(self, service, body, request_opts=None):
        # type: (GroupService, GroupPatchReqBody, List[Any]) -> None
        self.service = service
        self.body = body
        self.path_params = {}  # type: Dict[str, Any]
        if request_opts:
            self.request_opts = request_opts
        else:
            self.request_opts = []  # type: List[Any]

    def set_group_id(self, group_id):
        # type: (str) -> GroupPatchReqCall
        self.path_params['group_id'] = group_id
        return self

    def do(self):
        # type: () -> APIResponse[Type[None]]
        """Execute the request and return the parsed API response."""
        conf = self.service.service.conf
        # Local option list: prevents duplicate path-param options on
        # repeated do() calls (the generated code grew self.request_opts).
        opts = self.request_opts + [set_path_params(self.path_params)]
        req = APIRequest('/open-apis/contact/v3/group/:group_id', 'PATCH',
                         [ACCESS_TOKEN_TYPE_TENANT], self.body,
                         request_opts=opts)
        return req.do(conf)
class GroupMemberSimplelistReqCall(object):
    """Builder for GET /open-apis/contact/v3/group/:group_id/member/simplelist."""

    def __init__(self, service, request_opts=None):
        # type: (GroupMemberService, List[Any]) -> None
        self.service = service
        self.path_params = {}   # type: Dict[str, Any]
        self.query_params = {}  # type: Dict[str, Any]
        if request_opts:
            self.request_opts = request_opts
        else:
            self.request_opts = []  # type: List[Any]

    def set_group_id(self, group_id):
        # type: (str) -> GroupMemberSimplelistReqCall
        self.path_params['group_id'] = group_id
        return self

    def set_page_size(self, page_size):
        # type: (int) -> GroupMemberSimplelistReqCall
        self.query_params['page_size'] = page_size
        return self

    def set_page_token(self, page_token):
        # type: (str) -> GroupMemberSimplelistReqCall
        self.query_params['page_token'] = page_token
        return self

    def set_member_id_type(self, member_id_type):
        # type: (str) -> GroupMemberSimplelistReqCall
        self.query_params['member_id_type'] = member_id_type
        return self

    def set_member_type(self, member_type):
        # type: (str) -> GroupMemberSimplelistReqCall
        self.query_params['member_type'] = member_type
        return self

    def do(self):
        # type: () -> APIResponse[Type[GroupMemberSimplelistResult]]
        """Execute the request and return the parsed API response."""
        conf = self.service.service.conf
        # Local option list: prevents duplicate path/query-param options on
        # repeated do() calls (the generated code grew self.request_opts).
        opts = self.request_opts + [set_path_params(self.path_params),
                                    set_query_params(self.query_params)]
        req = APIRequest('/open-apis/contact/v3/group/:group_id/member/simplelist',
                         'GET', [ACCESS_TOKEN_TYPE_TENANT], None,
                         output_class=GroupMemberSimplelistResult,
                         request_opts=opts)
        return req.do(conf)
class GroupMemberAddReqCall(object):
    """Builder for POST /open-apis/contact/v3/group/:group_id/member/add."""

    def __init__(self, service, body, request_opts=None):
        # type: (GroupMemberService, GroupMemberAddReqBody, List[Any]) -> None
        self.service = service
        self.body = body
        self.path_params = {}  # type: Dict[str, Any]
        if request_opts:
            self.request_opts = request_opts
        else:
            self.request_opts = []  # type: List[Any]

    def set_group_id(self, group_id):
        # type: (str) -> GroupMemberAddReqCall
        self.path_params['group_id'] = group_id
        return self

    def do(self):
        # type: () -> APIResponse[Type[None]]
        """Execute the request and return the parsed API response."""
        conf = self.service.service.conf
        # Local option list: prevents duplicate path-param options on
        # repeated do() calls (the generated code grew self.request_opts).
        opts = self.request_opts + [set_path_params(self.path_params)]
        req = APIRequest('/open-apis/contact/v3/group/:group_id/member/add',
                         'POST', [ACCESS_TOKEN_TYPE_TENANT], self.body,
                         request_opts=opts)
        return req.do(conf)
class GroupMemberRemoveReqCall(object):
    """Builder for POST /open-apis/contact/v3/group/:group_id/member/remove."""

    def __init__(self, service, body, request_opts=None):
        # type: (GroupMemberService, GroupMemberRemoveReqBody, List[Any]) -> None
        self.service = service
        self.body = body
        self.path_params = {}  # type: Dict[str, Any]
        if request_opts:
            self.request_opts = request_opts
        else:
            self.request_opts = []  # type: List[Any]

    def set_group_id(self, group_id):
        # type: (str) -> GroupMemberRemoveReqCall
        self.path_params['group_id'] = group_id
        return self

    def do(self):
        # type: () -> APIResponse[Type[None]]
        """Execute the request and return the parsed API response."""
        conf = self.service.service.conf
        # Local option list: prevents duplicate path-param options on
        # repeated do() calls (the generated code grew self.request_opts).
        opts = self.request_opts + [set_path_params(self.path_params)]
        req = APIRequest('/open-apis/contact/v3/group/:group_id/member/remove',
                         'POST', [ACCESS_TOKEN_TYPE_TENANT], self.body,
                         request_opts=opts)
        return req.do(conf)
class UserDeleteReqCall(object):
    """Builder for DELETE /open-apis/contact/v3/users/:user_id."""

    def __init__(self, service, body, request_opts=None):
        # type: (UserService, UserDeleteReqBody, List[Any]) -> None
        self.service = service
        self.body = body
        self.path_params = {}   # type: Dict[str, Any]
        self.query_params = {}  # type: Dict[str, Any]
        if request_opts:
            self.request_opts = request_opts
        else:
            self.request_opts = []  # type: List[Any]

    def set_user_id(self, user_id):
        # type: (str) -> UserDeleteReqCall
        self.path_params['user_id'] = user_id
        return self

    def set_user_id_type(self, user_id_type):
        # type: (str) -> UserDeleteReqCall
        self.query_params['user_id_type'] = user_id_type
        return self

    def do(self):
        # type: () -> APIResponse[Type[None]]
        """Execute the request and return the parsed API response."""
        conf = self.service.service.conf
        # Local option list: prevents duplicate path/query-param options on
        # repeated do() calls (the generated code grew self.request_opts).
        opts = self.request_opts + [set_path_params(self.path_params),
                                    set_query_params(self.query_params)]
        req = APIRequest('/open-apis/contact/v3/users/:user_id', 'DELETE',
                         [ACCESS_TOKEN_TYPE_TENANT], self.body,
                         request_opts=opts)
        return req.do(conf)
class UserUpdateReqCall(object):
    """Builder for PUT /open-apis/contact/v3/users/:user_id."""

    def __init__(self, service, body, request_opts=None):
        # type: (UserService, User, List[Any]) -> None
        self.service = service
        self.body = body
        self.path_params = {}   # type: Dict[str, Any]
        self.query_params = {}  # type: Dict[str, Any]
        if request_opts:
            self.request_opts = request_opts
        else:
            self.request_opts = []  # type: List[Any]

    def set_user_id(self, user_id):
        # type: (str) -> UserUpdateReqCall
        self.path_params['user_id'] = user_id
        return self

    def set_user_id_type(self, user_id_type):
        # type: (str) -> UserUpdateReqCall
        self.query_params['user_id_type'] = user_id_type
        return self

    def set_department_id_type(self, department_id_type):
        # type: (str) -> UserUpdateReqCall
        self.query_params['department_id_type'] = department_id_type
        return self

    def do(self):
        # type: () -> APIResponse[Type[UserUpdateResult]]
        """Execute the request and return the parsed API response."""
        conf = self.service.service.conf
        # Local option list: prevents duplicate path/query-param options on
        # repeated do() calls (the generated code grew self.request_opts).
        opts = self.request_opts + [set_path_params(self.path_params),
                                    set_query_params(self.query_params)]
        req = APIRequest('/open-apis/contact/v3/users/:user_id', 'PUT',
                         [ACCESS_TOKEN_TYPE_TENANT], self.body,
                         output_class=UserUpdateResult, request_opts=opts)
        return req.do(conf)
class UserCreateReqCall(object):
    """Builder for POST /open-apis/contact/v3/users."""

    def __init__(self, service, body, request_opts=None):
        # type: (UserService, User, List[Any]) -> None
        self.service = service
        self.body = body
        self.query_params = {}  # type: Dict[str, Any]
        if request_opts:
            self.request_opts = request_opts
        else:
            self.request_opts = []  # type: List[Any]

    def set_user_id_type(self, user_id_type):
        # type: (str) -> UserCreateReqCall
        self.query_params['user_id_type'] = user_id_type
        return self

    def set_department_id_type(self, department_id_type):
        # type: (str) -> UserCreateReqCall
        self.query_params['department_id_type'] = department_id_type
        return self

    def set_client_token(self, client_token):
        # type: (str) -> UserCreateReqCall
        self.query_params['client_token'] = client_token
        return self

    def do(self):
        # type: () -> APIResponse[Type[UserCreateResult]]
        """Execute the request and return the parsed API response."""
        conf = self.service.service.conf
        # Local option list: prevents duplicate query-param options on
        # repeated do() calls (the generated code grew self.request_opts).
        opts = self.request_opts + [set_query_params(self.query_params)]
        req = APIRequest('/open-apis/contact/v3/users', 'POST',
                         [ACCESS_TOKEN_TYPE_TENANT], self.body,
                         output_class=UserCreateResult, request_opts=opts)
        return req.do(conf)
class UserPatchReqCall(object):
    """Builder for PATCH /open-apis/contact/v3/users/:user_id."""

    def __init__(self, service, body, request_opts=None):
        # type: (UserService, User, List[Any]) -> None
        self.service = service
        self.body = body
        self.path_params = {}   # type: Dict[str, Any]
        self.query_params = {}  # type: Dict[str, Any]
        if request_opts:
            self.request_opts = request_opts
        else:
            self.request_opts = []  # type: List[Any]

    def set_user_id(self, user_id):
        # type: (str) -> UserPatchReqCall
        self.path_params['user_id'] = user_id
        return self

    def set_user_id_type(self, user_id_type):
        # type: (str) -> UserPatchReqCall
        self.query_params['user_id_type'] = user_id_type
        return self

    def set_department_id_type(self, department_id_type):
        # type: (str) -> UserPatchReqCall
        self.query_params['department_id_type'] = department_id_type
        return self

    def do(self):
        # type: () -> APIResponse[Type[UserPatchResult]]
        """Execute the request and return the parsed API response."""
        conf = self.service.service.conf
        # Local option list: prevents duplicate path/query-param options on
        # repeated do() calls (the generated code grew self.request_opts).
        opts = self.request_opts + [set_path_params(self.path_params),
                                    set_query_params(self.query_params)]
        req = APIRequest('/open-apis/contact/v3/users/:user_id', 'PATCH',
                         [ACCESS_TOKEN_TYPE_TENANT, ACCESS_TOKEN_TYPE_USER],
                         self.body, output_class=UserPatchResult,
                         request_opts=opts)
        return req.do(conf)
class UserGetReqCall(object):
    """Builder for GET /open-apis/contact/v3/users/:user_id."""

    def __init__(self, service, request_opts=None):
        # type: (UserService, List[Any]) -> None
        self.service = service
        self.path_params = {}   # type: Dict[str, Any]
        self.query_params = {}  # type: Dict[str, Any]
        if request_opts:
            self.request_opts = request_opts
        else:
            self.request_opts = []  # type: List[Any]

    def set_user_id(self, user_id):
        # type: (str) -> UserGetReqCall
        self.path_params['user_id'] = user_id
        return self

    def set_user_id_type(self, user_id_type):
        # type: (str) -> UserGetReqCall
        self.query_params['user_id_type'] = user_id_type
        return self

    def set_department_id_type(self, department_id_type):
        # type: (str) -> UserGetReqCall
        self.query_params['department_id_type'] = department_id_type
        return self

    def do(self):
        # type: () -> APIResponse[Type[UserGetResult]]
        """Execute the request and return the parsed API response."""
        conf = self.service.service.conf
        # Local option list: prevents duplicate path/query-param options on
        # repeated do() calls (the generated code grew self.request_opts).
        opts = self.request_opts + [set_path_params(self.path_params),
                                    set_query_params(self.query_params)]
        req = APIRequest('/open-apis/contact/v3/users/:user_id', 'GET',
                         [ACCESS_TOKEN_TYPE_TENANT, ACCESS_TOKEN_TYPE_USER],
                         None, output_class=UserGetResult, request_opts=opts)
        return req.do(conf)
class UserListReqCall(object):
    """Builder for GET /open-apis/contact/v3/users."""

    def __init__(self, service, request_opts=None):
        # type: (UserService, List[Any]) -> None
        self.service = service
        self.query_params = {}  # type: Dict[str, Any]
        if request_opts:
            self.request_opts = request_opts
        else:
            self.request_opts = []  # type: List[Any]

    def set_user_id_type(self, user_id_type):
        # type: (str) -> UserListReqCall
        self.query_params['user_id_type'] = user_id_type
        return self

    def set_department_id_type(self, department_id_type):
        # type: (str) -> UserListReqCall
        self.query_params['department_id_type'] = department_id_type
        return self

    def set_department_id(self, department_id):
        # type: (str) -> UserListReqCall
        self.query_params['department_id'] = department_id
        return self

    def set_page_token(self, page_token):
        # type: (str) -> UserListReqCall
        self.query_params['page_token'] = page_token
        return self

    def set_page_size(self, page_size):
        # type: (int) -> UserListReqCall
        self.query_params['page_size'] = page_size
        return self

    def do(self):
        # type: () -> APIResponse[Type[UserListResult]]
        """Execute the request and return the parsed API response."""
        conf = self.service.service.conf
        # Local option list: prevents duplicate query-param options on
        # repeated do() calls (the generated code grew self.request_opts).
        opts = self.request_opts + [set_query_params(self.query_params)]
        req = APIRequest('/open-apis/contact/v3/users', 'GET',
                         [ACCESS_TOKEN_TYPE_TENANT, ACCESS_TOKEN_TYPE_USER],
                         None, output_class=UserListResult, request_opts=opts)
        return req.do(conf)
| 34.265208
| 138
| 0.635548
| 5,814
| 50,130
| 5.205366
| 0.027692
| 0.132302
| 0.063442
| 0.031721
| 0.86099
| 0.852465
| 0.837298
| 0.827419
| 0.793121
| 0.775542
| 0
| 0.000754
| 0.259106
| 50,130
| 1,462
| 139
| 34.288646
| 0.814087
| 0.176581
| 0
| 0.843159
| 1
| 0
| 0.048306
| 0.026371
| 0
| 0
| 0
| 0
| 0
| 1
| 0.173526
| false
| 0
| 0.005562
| 0
| 0.352614
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
28cd62e820c61fd82d543321b1b497990059f4fd
| 27,906
|
py
|
Python
|
lepidoptera/migrations/0001_initial.py
|
BelgianBiodiversityPlatform/catalogue-lepidoptera-belgium-webapp
|
c0ed7fde78f9af5afbd91c53ed3f543046e8dd41
|
[
"BSD-2-Clause-FreeBSD"
] | 2
|
2020-11-17T18:02:44.000Z
|
2021-02-02T04:00:21.000Z
|
lepidoptera/migrations/0001_initial.py
|
BelgianBiodiversityPlatform/catalogue-lepidoptera-belgium-webapp
|
c0ed7fde78f9af5afbd91c53ed3f543046e8dd41
|
[
"BSD-2-Clause-FreeBSD"
] | 5
|
2020-06-05T19:00:59.000Z
|
2022-01-13T00:52:30.000Z
|
lepidoptera/migrations/0001_initial.py
|
BelgianBiodiversityPlatform/catalogue-lepidoptera-belgium-webapp
|
c0ed7fde78f9af5afbd91c53ed3f543046e8dd41
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
# Generated by Django 2.0.6 on 2018-07-12 07:27
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import lepidoptera.models
import markdownx.models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Family',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('vernacular_name', models.CharField(blank=True, max_length=255)),
('vernacular_name_de', models.CharField(blank=True, max_length=255, null=True)),
('vernacular_name_en', models.CharField(blank=True, max_length=255, null=True)),
('vernacular_name_fr', models.CharField(blank=True, max_length=255, null=True)),
('vernacular_name_nl', models.CharField(blank=True, max_length=255, null=True)),
('wikidata_id', models.CharField(blank=True, max_length=255)),
('last_modified', models.DateTimeField(auto_now=True)),
('author', models.CharField(max_length=255)),
('text', markdownx.models.MarkdownxField(blank=True)),
('text_de', markdownx.models.MarkdownxField(blank=True, null=True)),
('text_en', markdownx.models.MarkdownxField(blank=True, null=True)),
('text_fr', markdownx.models.MarkdownxField(blank=True, null=True)),
('text_nl', markdownx.models.MarkdownxField(blank=True, null=True)),
('display_order', models.IntegerField(unique=True)),
('verbatim_family_id', models.IntegerField(blank=True, help_text='From the Access database', null=True, unique=True)),
('representative_picture', models.ImageField(blank=True, null=True, upload_to='family_representative_pictures')),
],
options={
'verbose_name_plural': 'families',
},
bases=(lepidoptera.models.DisplayOrderNavigable, models.Model),
),
migrations.CreateModel(
name='Genus',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('vernacular_name', models.CharField(blank=True, max_length=255)),
('vernacular_name_de', models.CharField(blank=True, max_length=255, null=True)),
('vernacular_name_en', models.CharField(blank=True, max_length=255, null=True)),
('vernacular_name_fr', models.CharField(blank=True, max_length=255, null=True)),
('vernacular_name_nl', models.CharField(blank=True, max_length=255, null=True)),
('wikidata_id', models.CharField(blank=True, max_length=255)),
('last_modified', models.DateTimeField(auto_now=True)),
('author', models.CharField(max_length=255)),
('text', markdownx.models.MarkdownxField(blank=True)),
('text_de', markdownx.models.MarkdownxField(blank=True, null=True)),
('text_en', markdownx.models.MarkdownxField(blank=True, null=True)),
('text_fr', markdownx.models.MarkdownxField(blank=True, null=True)),
('text_nl', markdownx.models.MarkdownxField(blank=True, null=True)),
('display_order', models.IntegerField(unique=True)),
('verbatim_genus_id', models.IntegerField(blank=True, help_text='From the Access database', null=True, unique=True)),
('family', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='lepidoptera.Family')),
],
options={
'verbose_name_plural': 'genera',
},
bases=(lepidoptera.models.ParentForAdminListMixin, models.Model),
),
migrations.CreateModel(
name='HostPlantFamily',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('vernacular_name', models.CharField(blank=True, max_length=255)),
('vernacular_name_de', models.CharField(blank=True, max_length=255, null=True)),
('vernacular_name_en', models.CharField(blank=True, max_length=255, null=True)),
('vernacular_name_fr', models.CharField(blank=True, max_length=255, null=True)),
('vernacular_name_nl', models.CharField(blank=True, max_length=255, null=True)),
('wikidata_id', models.CharField(blank=True, max_length=255)),
('last_modified', models.DateTimeField(auto_now=True)),
('verbatim_id', models.IntegerField(blank=True, help_text='From the Access database', null=True, unique=True)),
],
options={
'verbose_name_plural': 'Host plant families',
'ordering': ['name'],
},
),
migrations.CreateModel(
name='HostPlantGenus',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('vernacular_name', models.CharField(blank=True, max_length=255)),
('vernacular_name_de', models.CharField(blank=True, max_length=255, null=True)),
('vernacular_name_en', models.CharField(blank=True, max_length=255, null=True)),
('vernacular_name_fr', models.CharField(blank=True, max_length=255, null=True)),
('vernacular_name_nl', models.CharField(blank=True, max_length=255, null=True)),
('wikidata_id', models.CharField(blank=True, max_length=255)),
('last_modified', models.DateTimeField(auto_now=True)),
('verbatim_id', models.IntegerField(blank=True, help_text='From the Access database', null=True, unique=True)),
('author', models.CharField(blank=True, max_length=255)),
('family', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lepidoptera.HostPlantFamily')),
],
options={
'verbose_name_plural': 'Host plant genera',
'ordering': ['name'],
},
),
migrations.CreateModel(
name='HostPlantSpecies',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('vernacular_name', models.CharField(blank=True, max_length=255)),
('vernacular_name_de', models.CharField(blank=True, max_length=255, null=True)),
('vernacular_name_en', models.CharField(blank=True, max_length=255, null=True)),
('vernacular_name_fr', models.CharField(blank=True, max_length=255, null=True)),
('vernacular_name_nl', models.CharField(blank=True, max_length=255, null=True)),
('wikidata_id', models.CharField(blank=True, max_length=255)),
('last_modified', models.DateTimeField(auto_now=True)),
('verbatim_id', models.IntegerField(blank=True, help_text='From the Access database', null=True, unique=True)),
('author', models.CharField(blank=True, max_length=255)),
('genus', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lepidoptera.HostPlantGenus')),
],
options={
'verbose_name_plural': 'Host plant species',
'ordering': ['name'],
},
),
migrations.CreateModel(
name='Journal',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('verbatim_id', models.IntegerField(blank=True, help_text='From the Access database', null=True, unique=True)),
('title', models.CharField(max_length=255)),
],
),
migrations.CreateModel(
name='Observation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('plant_genus', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='lepidoptera.HostPlantGenus')),
('plant_species', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='lepidoptera.HostPlantSpecies')),
],
),
migrations.CreateModel(
name='PageFragment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('identifier', models.SlugField(unique=True)),
('content_nl', markdownx.models.MarkdownxField(blank=True)),
('content_en', markdownx.models.MarkdownxField(blank=True)),
('content_fr', markdownx.models.MarkdownxField(blank=True)),
('content_de', markdownx.models.MarkdownxField(blank=True)),
],
),
migrations.CreateModel(
name='Photographer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('full_name', models.CharField(max_length=100)),
('verbatim_photographer_id', models.IntegerField(blank=True, help_text='From the Access database', null=True, unique=True)),
],
options={
'ordering': ('full_name',),
},
),
migrations.CreateModel(
name='Province',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('code', models.CharField(max_length=3, unique=True)),
('order', models.IntegerField(help_text='In presence tables: order in which the provinces are displayed', unique=True)),
('historical', models.BooleanField(help_text="The province doesn't exists anymore")),
('recent', models.BooleanField(help_text='The province was created at province split')),
('polygon_reference', models.IntegerField(unique=True)),
],
options={
'ordering': ['order'],
},
),
migrations.CreateModel(
name='Publication',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('verbatim_id', models.IntegerField(blank=True, help_text='From the Access database', null=True, unique=True)),
('author', models.CharField(max_length=255)),
('title', models.CharField(max_length=255)),
('publisher', models.CharField(blank=True, max_length=255)),
('year', models.CharField(max_length=20)),
('volume', models.CharField(max_length=20)),
('issue', models.CharField(blank=True, max_length=20)),
('page_numbers', models.CharField(blank=True, max_length=255)),
('journal', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lepidoptera.Journal')),
],
options={
'ordering': ['title'],
},
),
migrations.CreateModel(
name='Species',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('vernacular_name', models.CharField(blank=True, max_length=255)),
('vernacular_name_de', models.CharField(blank=True, max_length=255, null=True)),
('vernacular_name_en', models.CharField(blank=True, max_length=255, null=True)),
('vernacular_name_fr', models.CharField(blank=True, max_length=255, null=True)),
('vernacular_name_nl', models.CharField(blank=True, max_length=255, null=True)),
('wikidata_id', models.CharField(blank=True, max_length=255)),
('last_modified', models.DateTimeField(auto_now=True)),
('author', models.CharField(max_length=255)),
('text', markdownx.models.MarkdownxField(blank=True)),
('text_de', markdownx.models.MarkdownxField(blank=True, null=True)),
('text_en', markdownx.models.MarkdownxField(blank=True, null=True)),
('text_fr', markdownx.models.MarkdownxField(blank=True, null=True)),
('text_nl', markdownx.models.MarkdownxField(blank=True, null=True)),
('display_order', models.IntegerField(unique=True)),
('verbatim_species_number', models.IntegerField(blank=True, help_text='From the Access database', null=True, unique=True)),
('code', models.CharField(max_length=50, unique=True, validators=[lepidoptera.models.validate_only_numbers_and_uppercase, django.core.validators.MinLengthValidator(4)], verbose_name='Species code')),
('first_mention_page', models.CharField(blank=True, max_length=100, verbose_name='page')),
('first_mention_link', models.URLField(blank=True, verbose_name='hyperlink')),
('imago_section_text', markdownx.models.MarkdownxField(blank=True)),
('larva_section_text', markdownx.models.MarkdownxField(blank=True)),
('egg_section_text', markdownx.models.MarkdownxField(blank=True)),
('case_section_text', markdownx.models.MarkdownxField(blank=True)),
('bag_section_text', markdownx.models.MarkdownxField(blank=True)),
('mine_section_text', markdownx.models.MarkdownxField(blank=True)),
('cocoon_section_text', markdownx.models.MarkdownxField(blank=True)),
('bionomics_section_text', markdownx.models.MarkdownxField(blank=True)),
('habitat_section_text', markdownx.models.MarkdownxField(blank=True)),
('hostplants_section_text', markdownx.models.MarkdownxField(blank=True)),
('flightperiod_section_text', markdownx.models.MarkdownxField(blank=True)),
('genus_name', models.CharField(editable=False, max_length=255)),
('first_mention_publication', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='lepidoptera.Publication', verbose_name='publication')),
('genus', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='lepidoptera.Genus')),
],
options={
'verbose_name_plural': 'species',
},
bases=(lepidoptera.models.ParentForAdminListMixin, models.Model),
),
migrations.CreateModel(
name='SpeciesPicture',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('image', models.ImageField(blank=True, null=True, upload_to='specimen_pictures')),
('image_subject', models.CharField(choices=[('MUSEUM_SPECIMEN', 'Museum specimen'), ('IN_VIVO_SPECIMEN', 'In Vivo Specimen'), ('PRE_ADULT_STAGE', 'Pre-adult stage'), ('HOST_PLANT', 'Host plant'), ('BIONOMICS', 'Bionomics'), ('HABITAT', 'Habitat')], max_length=20)),
('verbatim_image_filename', models.CharField(max_length=255)),
('specimen_stage', models.CharField(blank=True, choices=[('i', 'Imago'), ('e', 'Egg'), ('l', 'Larva'), ('c', 'Case'), ('b', 'Bag'), ('m', 'Mine'), ('p', 'Pupa/Cocoon')], max_length=1)),
('specimen_sex', models.CharField(blank=True, choices=[('M', 'Male'), ('F', 'Female'), ('A', 'Adult')], max_length=1)),
('side', models.CharField(blank=True, choices=[('UPPER', 'Upper'), ('UNDER', 'Under')], max_length=5)),
('gallery_order', models.IntegerField(help_text='Order in various galleries. Smaller numbers comes first!')),
('photographer', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='lepidoptera.Photographer')),
('species', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lepidoptera.Species')),
],
),
migrations.CreateModel(
name='SpeciesPresence',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('present', models.BooleanField(default=False)),
],
),
migrations.CreateModel(
name='Status',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('verbatim_status_id', models.IntegerField(help_text='From the Access database', unique=True)),
('name', models.CharField(max_length=255)),
],
options={
'verbose_name_plural': 'statuses',
},
),
migrations.CreateModel(
name='Subfamily',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('vernacular_name', models.CharField(blank=True, max_length=255)),
('vernacular_name_de', models.CharField(blank=True, max_length=255, null=True)),
('vernacular_name_en', models.CharField(blank=True, max_length=255, null=True)),
('vernacular_name_fr', models.CharField(blank=True, max_length=255, null=True)),
('vernacular_name_nl', models.CharField(blank=True, max_length=255, null=True)),
('wikidata_id', models.CharField(blank=True, max_length=255)),
('last_modified', models.DateTimeField(auto_now=True)),
('author', models.CharField(max_length=255)),
('text', markdownx.models.MarkdownxField(blank=True)),
('text_de', markdownx.models.MarkdownxField(blank=True, null=True)),
('text_en', markdownx.models.MarkdownxField(blank=True, null=True)),
('text_fr', markdownx.models.MarkdownxField(blank=True, null=True)),
('text_nl', markdownx.models.MarkdownxField(blank=True, null=True)),
('display_order', models.IntegerField(unique=True)),
('verbatim_subfamily_id', models.IntegerField(blank=True, help_text='From the Access database', null=True, unique=True)),
('family', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lepidoptera.Family')),
('status', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lepidoptera.Status')),
],
options={
'verbose_name_plural': 'subfamilies',
},
),
migrations.CreateModel(
name='Subgenus',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('vernacular_name', models.CharField(blank=True, max_length=255)),
('vernacular_name_de', models.CharField(blank=True, max_length=255, null=True)),
('vernacular_name_en', models.CharField(blank=True, max_length=255, null=True)),
('vernacular_name_fr', models.CharField(blank=True, max_length=255, null=True)),
('vernacular_name_nl', models.CharField(blank=True, max_length=255, null=True)),
('wikidata_id', models.CharField(blank=True, max_length=255)),
('last_modified', models.DateTimeField(auto_now=True)),
('author', models.CharField(max_length=255)),
('text', markdownx.models.MarkdownxField(blank=True)),
('text_de', markdownx.models.MarkdownxField(blank=True, null=True)),
('text_en', markdownx.models.MarkdownxField(blank=True, null=True)),
('text_fr', markdownx.models.MarkdownxField(blank=True, null=True)),
('text_nl', markdownx.models.MarkdownxField(blank=True, null=True)),
('display_order', models.IntegerField(unique=True)),
('verbatim_subgenus_id', models.IntegerField(blank=True, help_text='From the Access database', null=True, unique=True)),
('genus_name', models.CharField(editable=False, max_length=255)),
('genus', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lepidoptera.Genus')),
('status', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lepidoptera.Status')),
],
options={
'verbose_name_plural': 'subgenera',
},
),
migrations.CreateModel(
name='Substrate',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('lepidoptera_species', models.ManyToManyField(through='lepidoptera.Observation', to='lepidoptera.Species')),
],
),
migrations.CreateModel(
name='TimePeriod',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('icon', models.ImageField(upload_to='time_period_icons')),
],
),
migrations.CreateModel(
name='Tribus',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('vernacular_name', models.CharField(blank=True, max_length=255)),
('vernacular_name_de', models.CharField(blank=True, max_length=255, null=True)),
('vernacular_name_en', models.CharField(blank=True, max_length=255, null=True)),
('vernacular_name_fr', models.CharField(blank=True, max_length=255, null=True)),
('vernacular_name_nl', models.CharField(blank=True, max_length=255, null=True)),
('wikidata_id', models.CharField(blank=True, max_length=255)),
('last_modified', models.DateTimeField(auto_now=True)),
('author', models.CharField(max_length=255)),
('text', markdownx.models.MarkdownxField(blank=True)),
('text_de', markdownx.models.MarkdownxField(blank=True, null=True)),
('text_en', markdownx.models.MarkdownxField(blank=True, null=True)),
('text_fr', markdownx.models.MarkdownxField(blank=True, null=True)),
('text_nl', markdownx.models.MarkdownxField(blank=True, null=True)),
('display_order', models.IntegerField(unique=True)),
('verbatim_tribus_id', models.IntegerField(blank=True, help_text='From the Access database', null=True, unique=True)),
('status', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lepidoptera.Status')),
('subfamily', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lepidoptera.Subfamily')),
],
options={
'verbose_name_plural': 'tribus',
},
),
migrations.AddField(
model_name='speciespresence',
name='period',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lepidoptera.TimePeriod'),
),
migrations.AddField(
model_name='speciespresence',
name='province',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lepidoptera.Province'),
),
migrations.AddField(
model_name='speciespresence',
name='species',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lepidoptera.Species'),
),
migrations.AddField(
model_name='species',
name='status',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lepidoptera.Status'),
),
migrations.AddField(
model_name='species',
name='subgenus',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='lepidoptera.Subgenus'),
),
migrations.AddField(
model_name='species',
name='synonym_of',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='synonyms', to='lepidoptera.Species'),
),
migrations.AddField(
model_name='observation',
name='species',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lepidoptera.Species'),
),
migrations.AddField(
model_name='observation',
name='substrate',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='lepidoptera.Substrate'),
),
migrations.AddField(
model_name='hostplantspecies',
name='lepidoptera_species',
field=models.ManyToManyField(through='lepidoptera.Observation', to='lepidoptera.Species'),
),
migrations.AddField(
model_name='hostplantgenus',
name='lepidoptera_species',
field=models.ManyToManyField(through='lepidoptera.Observation', to='lepidoptera.Species'),
),
migrations.AddField(
model_name='genus',
name='status',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lepidoptera.Status'),
),
migrations.AddField(
model_name='genus',
name='subfamily',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='lepidoptera.Subfamily'),
),
migrations.AddField(
model_name='genus',
name='synonym_of',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='synonyms', to='lepidoptera.Genus'),
),
migrations.AddField(
model_name='genus',
name='tribus',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='lepidoptera.Tribus'),
),
migrations.AddField(
model_name='family',
name='status',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='lepidoptera.Status'),
),
migrations.AlterUniqueTogether(
name='speciespresence',
unique_together={('species', 'province', 'period')},
),
]
| 60.930131
| 281
| 0.603956
| 2,831
| 27,906
| 5.786648
| 0.077711
| 0.074167
| 0.060798
| 0.092296
| 0.839031
| 0.808265
| 0.757295
| 0.719509
| 0.714504
| 0.696069
| 0
| 0.013662
| 0.252455
| 27,906
| 457
| 282
| 61.063457
| 0.771631
| 0.001613
| 0
| 0.688889
| 1
| 0
| 0.172763
| 0.019599
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.011111
| 0
| 0.02
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3ab06d8a1499273e326cfed4b283af32449959f7
| 5,387
|
py
|
Python
|
plotting/visualise_viability.py
|
PFMassiani/vibly
|
5e552cb2ed375d5cd378b92de2f2473485211f2f
|
[
"MIT"
] | 5
|
2020-06-08T09:44:44.000Z
|
2022-03-12T06:13:10.000Z
|
plotting/visualise_viability.py
|
PFMassiani/vibly
|
5e552cb2ed375d5cd378b92de2f2473485211f2f
|
[
"MIT"
] | 7
|
2019-09-27T07:05:34.000Z
|
2020-04-27T14:15:07.000Z
|
plotting/visualise_viability.py
|
PFMassiani/vibly
|
5e552cb2ed375d5cd378b92de2f2473485211f2f
|
[
"MIT"
] | 6
|
2019-10-02T09:55:10.000Z
|
2021-03-08T13:55:21.000Z
|
import numpy as np
import matplotlib.pyplot as plt
state_colormap = 'viridis' # other sequential colormaps: 'plasma', 'gray'
change_colormap = 'coolwarm' # other diverging colormaps: 'bwr', 'RdBu'
def visualise(data, initial_conditions=None, include_end_state=False,
              state_colormap=state_colormap, change_colormap=change_colormap):
    """Visualise the viable set of a computed Q-map as heatmaps.

    Plots the change in state (end state minus initial state) over the
    state/action grid, restricted to viable entries (``Q_V == 1``); all
    other entries are masked with NaN.  Optionally also plots the end
    state itself in a subplot above the change plot.

    :param data: dict with keys ``'Q_map'``, ``'Q_V'`` and ``'grids'``
        (the latter a dict with 1-D ``'states'`` and ``'actions'`` grids).
    :param initial_conditions: optional sequence; when non-empty, indices
        [1] and [2] are used (presumably touchdown height and speed --
        TODO confirm against callers) to draw the lower bound on state.
    :param include_end_state: when True, add an 'End state' subplot.
    :param state_colormap: sequential colormap for the end-state plot.
    :param change_colormap: diverging colormap for the change plot.
    """
    # NOTE: default changed from a shared mutable list ([]) to None to
    # avoid the mutable-default-argument pitfall; behaviour is unchanged
    # for all callers (the list is never mutated, but None is safer).
    if initial_conditions is None:
        initial_conditions = []
    Q_map = data['Q_map']
    Q_V = data['Q_V']
    grids = data['grids']
    s_grid = grids['states']
    a_grid = grids['actions']
    s_min = s_grid[0]
    s_max = s_grid[-1]
    a_min = a_grid[0]
    a_max = a_grid[-1]
    # One column of states per action: shape (len(s_grid), len(a_grid)).
    initial_state = np.repeat(np.array([s_grid]), len(a_grid), axis=0).T
    end_state = np.zeros((len(s_grid), len(a_grid)))
    end_state[Q_V == 1] = Q_map[Q_V == 1]
    end_state[Q_V != 1] = np.nan  # mask non-viable entries
    change_state = end_state - initial_state
    if include_end_state:
        fig, (ax2, ax) = plt.subplots(nrows=2)
        im2 = ax2.imshow(end_state, origin='lower',
                         extent=[a_min, a_max, s_min, s_max],
                         cmap=state_colormap)
        fig.colorbar(im2, ax=ax2)
        ax2.set_ylabel('State')
        ax2.set_title('End state')
        ax2.set_xticks([])
    else:
        fig, ax = plt.subplots()
    # Symmetric colour range so zero change maps to the colormap centre.
    min_change = np.min(change_state[Q_V == 1])
    max_change = np.max(change_state[Q_V == 1])
    change = max(abs(min_change), abs(max_change))
    im = ax.imshow(change_state, origin='lower',
                   extent=[a_min, a_max, s_min, s_max],
                   vmin=-change, vmax=change, cmap=change_colormap)
    fig.colorbar(im, ax=ax)
    ax.set_xlabel('Angle of attack (rad)')
    ax.set_ylabel('State')
    ax.set_title('Change in state')
    if len(initial_conditions) > 0:
        # Lower bound on state:
        # height at touchdown / (height + speed^2/2g)
        s_lower = np.cos(a_grid) / (initial_conditions[1]
                                    + initial_conditions[2]**2 / (2 * 9.81))
        ax.plot(a_grid, s_lower, color='k')
        ax.fill_between(a_grid, s_lower, color='grey')
        if include_end_state:
            ax2.plot(a_grid, s_lower, color='k')
            ax2.fill_between(a_grid, s_lower, color='grey')
    plt.show(block=False)
def visualise_no_dict(s_grid, a_grid, Q_map, Q_V, initial_conditions = [], include_end_state = False,
        state_colormap = state_colormap, change_colormap = change_colormap):
    """Same visualisation as :func:`visualise`, but taking the grids and
    maps as separate arguments instead of a packed ``data`` dict.

    Shows the change in state over the viable part of the (state, action)
    grid, and optionally the end state itself in a subplot above it.
    """
    s_min, s_max = s_grid[0], s_grid[-1]
    a_min, a_max = a_grid[0], a_grid[-1]
    # Broadcast the state grid across all actions: one row per state.
    start_state = np.repeat(np.array([s_grid]), len(a_grid), axis=0).T
    viable = (Q_V == 1)
    # Start from all-NaN and fill in only the viable entries.
    final_state = np.zeros((len(s_grid), len(a_grid)))
    final_state[viable] = Q_map[viable]
    final_state[~viable] = np.nan
    delta = final_state - start_state
    span = [a_min, a_max, s_min, s_max]
    if include_end_state:
        fig, (ax2, ax) = plt.subplots(nrows=2)
        im2 = ax2.imshow(final_state, origin='lower', extent=span,
                         cmap=state_colormap)
        fig.colorbar(im2, ax=ax2)
        ax2.set_ylabel('State')
        ax2.set_title('End state')
        ax2.set_xticks([])
    else:
        fig, ax = plt.subplots()
    # Symmetric limits centre the diverging colormap on zero change.
    bound = max(abs(np.min(delta[viable])), abs(np.max(delta[viable])))
    im = ax.imshow(delta, origin='lower', extent=span,
                   vmin=-bound, vmax=bound, cmap=change_colormap)
    fig.colorbar(im, ax=ax)
    ax.set_xlabel('Angle of attack (rad)')
    ax.set_ylabel('State')
    ax.set_title('Change in state')
    if len(initial_conditions) > 0:
        # Lower bound on state:
        # height at touchdown / (height + speed^2/2g)
        s_lower = np.cos(a_grid) / (initial_conditions[1]
                                    + initial_conditions[2] ** 2 / (2 * 9.81))
        ax.plot(a_grid, s_lower, color='k')
        ax.fill_between(a_grid, s_lower, color='grey')
        if include_end_state:
            ax2.plot(a_grid, s_lower, color='k')
            ax2.fill_between(a_grid, s_lower, color='grey')
    plt.show(block=False)
def visualise_old(data, initial_conditions = [], include_end_state = False, state_colormap = state_colormap, change_colormap = change_colormap):
    """Legacy version of :func:`visualise` using the implicit pyplot
    state machine (``plt.subplot``/``plt.imshow``) instead of explicit
    figure/axes objects.

    Plots the change in state over the viable (``Q_V == 1``) part of the
    state/action grid, optionally with the end state in a subplot above.
    NOTE(review): ``initial_conditions=[]`` is a mutable default argument;
    it is never mutated here, so this is benign but non-idiomatic.
    """
    Q_map = data['Q_map']
    Q_V = data['Q_V']
    grids = data['grids']
    s_grid = grids['states']
    a_grid = grids['actions']
    s_min = s_grid[0]
    s_max = s_grid[-1]
    a_min = a_grid[0]
    a_max = a_grid[-1]
    # One column of states per action: shape (len(s_grid), len(a_grid)).
    initial_state = np.repeat(np.array([s_grid]), len(a_grid), axis = 0).T
    end_state = np.zeros((len(s_grid), len(a_grid)))
    end_state[Q_V == 1] = Q_map[Q_V == 1]
    end_state[Q_V != 1] = np.nan  # mask non-viable entries
    change_state = end_state - initial_state
    plt.figure()
    if include_end_state:
        # Top subplot: end state heatmap.
        plt.subplot(211)
        plt.imshow(end_state, origin = 'lower',
            extent = [a_min, a_max, s_min, s_max], cmap = state_colormap)
        plt.colorbar()
        # Bottom subplot becomes the current axes for the change plot.
        plt.subplot(212)
    # Symmetric colour range so zero change maps to the colormap centre.
    min_change = np.min(change_state[Q_V == 1])
    max_change = np.max(change_state[Q_V == 1])
    change = max(abs(min_change), abs(max_change))
    plt.imshow(change_state, origin = 'lower',
        extent = [a_min, a_max, s_min, s_max],
        vmin = - change, vmax = change, cmap = change_colormap)
    plt.colorbar()
    if len(initial_conditions) > 0:
        ## To determine the lower bound on state
        # height at touchdown / (height + speed^2/2g)
        s_lower = np.cos(a_grid)/(initial_conditions[1] +
            initial_conditions[2]**2/(2*9.81))
        plt.plot(a_grid, s_lower, color = 'k')
        plt.fill_between(a_grid, s_lower, color = 'grey')
        if include_end_state:
            # Also draw the bound on the end-state subplot.
            plt.subplot(2,1,1)
            plt.plot(a_grid, s_lower, color = 'k')
            plt.fill_between(a_grid, s_lower, color = 'grey')
    ## Labels
    if include_end_state:
        plt.subplot(2,1,1)
        plt.xticks([])
        plt.ylabel('State')
        plt.title('End state')
        plt.subplot(2,1,2)
    plt.xlabel('Angle of attack (rad)')
    plt.ylabel('State')
    plt.title('Change in state')
| 33.66875
| 145
| 0.682198
| 927
| 5,387
| 3.703344
| 0.114347
| 0.043694
| 0.013108
| 0.027964
| 0.922225
| 0.903
| 0.88931
| 0.88028
| 0.88028
| 0.88028
| 0
| 0.021408
| 0.158901
| 5,387
| 159
| 146
| 33.880503
| 0.736261
| 0.062744
| 0
| 0.828125
| 0
| 0
| 0.057994
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.023438
| false
| 0
| 0.015625
| 0
| 0.039063
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3ad601c320633a1634d7e7ad8913defa2352f198
| 209
|
py
|
Python
|
tf_model_zoo/__init__.py
|
zhang-can/ECO-pytorch
|
355c3866b35cdaa5d451263c1f3291c150e22eeb
|
[
"BSD-2-Clause"
] | 192
|
2018-06-13T06:42:03.000Z
|
2022-02-22T11:26:41.000Z
|
tf_model_zoo/__init__.py
|
aimeng100/ECO-pytorch
|
f1ea392eea8d651c123fff3b876e233ec51bc2cb
|
[
"BSD-2-Clause"
] | 64
|
2018-06-20T10:14:17.000Z
|
2021-09-08T05:58:25.000Z
|
tf_model_zoo/__init__.py
|
aimeng100/ECO-pytorch
|
f1ea392eea8d651c123fff3b876e233ec51bc2cb
|
[
"BSD-2-Clause"
] | 43
|
2018-06-30T00:59:43.000Z
|
2021-08-25T02:21:23.000Z
|
from .inceptionresnetv2.pytorch_load import inceptionresnetv2
from .inceptionv4.pytorch_load import inceptionv4
from .bninception.pytorch_load import BNInception, InceptionV3
from .ECO.pytorch_load import ECO
| 41.8
| 62
| 0.875598
| 25
| 209
| 7.16
| 0.36
| 0.24581
| 0.379888
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026042
| 0.08134
| 209
| 4
| 63
| 52.25
| 0.90625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3ae583d3b140fe58b0655a068f01aefe20ec56b2
| 6,965
|
py
|
Python
|
auxiliary/generate_figures.py
|
mchandra12/replication_gonzalez_2013
|
ff8c342e1cd9af225ccb9a4e63069fef1e49431d
|
[
"MIT"
] | null | null | null |
auxiliary/generate_figures.py
|
mchandra12/replication_gonzalez_2013
|
ff8c342e1cd9af225ccb9a4e63069fef1e49431d
|
[
"MIT"
] | null | null | null |
auxiliary/generate_figures.py
|
mchandra12/replication_gonzalez_2013
|
ff8c342e1cd9af225ccb9a4e63069fef1e49431d
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
import numpy as np
import statsmodels.formula.api as smf
from auxiliary.generate_tables import *
from auxiliary.pre_process_datasets import *
from auxiliary.run_regressions import *
# import pandas as pd
def create_fig_hh_expenditure():
    """Plot annual household expenditure by month of birth (HBS 2008).

    Left panel: total expenditure ('gastmon'); right panel: child-related
    expenditure ('c_m_exp').  Monthly means are scattered and RDD-style
    polynomial fits are overlaid on each side of the July 2007 cutoff
    (month 0).  Relies on the module-level DataFrame ``df3`` brought in
    by the wildcard imports -- assumes it has columns 'month', 'gastmon'
    and 'c_m_exp' (TODO confirm against pre_process_datasets).
    """
    df3_avg = df3.groupby(["month"]).mean()
    # Turn month back to a column from index
    df3_avg.reset_index(inplace=True)
    plt.figure(figsize=(13, 4))

    # --- Left panel: total expenditure ---
    plt.subplot(1, 2, 1)
    # RDD windows: 30 months before and 19 months after the cutoff.
    df_rdd_left = df3_avg.loc[(df3_avg["month"] < 0) & (df3_avg["month"] > -31)]
    df_rdd_right = df3_avg.loc[(df3_avg["month"] > 0) & (df3_avg["month"] < 20)]
    coeffs = np.polyfit(df_rdd_left["month"], df_rdd_left["gastmon"], 2)
    poly = np.poly1d(coeffs)
    predict_x = np.linspace(df_rdd_left["month"].iloc[0], df_rdd_left["month"].iloc[-1])
    predict_y = poly(predict_x)
    plt.plot(
        df_rdd_left["month"], df_rdd_left["gastmon"], "o", predict_x, predict_y, c="g"
    )
    coeffsr = np.polyfit(df_rdd_right["month"], df_rdd_right["gastmon"], 2)
    polyr = np.poly1d(coeffsr)
    predict_xr = np.linspace(
        df_rdd_right["month"].iloc[0], df_rdd_right["month"].iloc[-1]
    )
    # BUG FIX: evaluate the right-hand fit with polyr; the original used
    # the left-hand `poly` here, leaving `polyr` computed but never used.
    predict_yr = polyr(predict_xr)
    plt.plot(
        df_rdd_right["month"],
        df_rdd_right["gastmon"],
        "o",
        predict_xr,
        predict_yr,
        c="y",
    )
    plt.ylim(0, 60000)
    plt.xlim(-30, 20)
    plt.xlabel("Month of birth (0=July 2007)")
    plt.axvline(x=0, color="k")  # mark the policy cutoff
    plt.grid(True)
    plt.title("Total expenditure by month of birth")

    # --- Right panel: child-related expenditure ---
    plt.subplot(1, 2, 2)
    coeffs = np.polyfit(df_rdd_left["month"], df_rdd_left["c_m_exp"], 2)
    poly = np.poly1d(coeffs)
    predict_x = np.linspace(df_rdd_left["month"].iloc[0], df_rdd_left["month"].iloc[-1])
    predict_y = poly(predict_x)
    plt.plot(
        df_rdd_left["month"], df_rdd_left["c_m_exp"], "o", predict_x, predict_y, c="g"
    )
    # NOTE(review): degree 3 on the right side vs degree 2 elsewhere --
    # kept as-is; presumably intentional, confirm against the paper.
    coeffsr = np.polyfit(df_rdd_right["month"], df_rdd_right["c_m_exp"], 3)
    polyr = np.poly1d(coeffsr)
    predict_xr = np.linspace(
        df_rdd_right["month"].iloc[0], df_rdd_right["month"].iloc[-1]
    )
    predict_yr = polyr(predict_xr)
    plt.plot(
        df_rdd_right["month"],
        df_rdd_right["c_m_exp"],
        "o",
        predict_xr,
        predict_yr,
        c="y",
    )
    plt.ylim(0, 10000)
    plt.xlim(-30, 20)
    plt.xlabel("Month of birth (0=July 2007)")
    plt.axvline(x=0, color="k")
    plt.grid(True)
    plt.title("Child-related expenditure by month of birth")
    plt.suptitle(
        "Household Expenditure (Annual) by Month of Birth (HBS 2008)",
        verticalalignment="baseline",
        fontsize=14,
    )
    return
def create_daycare_figure():
    """Plot daycare expenditure by month of birth (HBS 2008).

    Left panel: mean daycare expenditure ('m_exp12312'); right panel:
    fraction of households with positive daycare expenditure
    ('daycare_bin').  Linear fits (OLS on month) are drawn on each side
    of the July 2007 cutoff.  Relies on the module-level DataFrame
    ``df3`` from the wildcard imports -- column names assumed, TODO
    confirm against pre_process_datasets.
    """
    df3_avg = df3.groupby(["month"]).mean()
    # Turn month back to a column from index
    df3_avg.reset_index(inplace=True)
    plt.figure(figsize=(13, 4))
    # --- Left panel: daycare expenditure level ---
    plt.subplot(1, 2, 1)
    # RDD windows: 30 months before / 19 months after the cutoff.
    df_rdd_left = df3_avg.loc[(df3_avg["month"] < 0) & (df3_avg["month"] > -31)]
    df_rdd_right = df3_avg.loc[(df3_avg["month"] > 0) & (df3_avg["month"] < 20)]
    res = smf.ols(formula="m_exp12312 ~ month", data=df_rdd_left).fit()
    a1 = res.predict(df_rdd_left)
    res2 = smf.ols(formula="m_exp12312 ~ month", data=df_rdd_right).fit()
    a2 = res2.predict(df_rdd_right)
    plt.scatter(df3_avg["month"], df3_avg["m_exp12312"], edgecolors="r")
    plt.plot(df_rdd_left["month"], a1)
    plt.plot(df_rdd_right["month"], a2)
    plt.ylim(0, 1000)
    plt.xlim(-30, 20)
    plt.xlabel("Month of birth (0=July 2007)")
    plt.axvline(x=0, color="k")  # mark the policy cutoff
    plt.grid(True)
    plt.title("Daycare expenditure by month of birth")
    # --- Right panel: extensive margin (any daycare expenditure) ---
    plt.subplot(1, 2, 2)
    res = smf.ols(formula="daycare_bin ~ month", data=df_rdd_left).fit()
    a1 = res.predict(df_rdd_left)
    res2 = smf.ols(formula="daycare_bin ~ month", data=df_rdd_right).fit()
    a2 = res2.predict(df_rdd_right)
    plt.scatter(df3_avg["month"], df3_avg["daycare_bin"], edgecolors="r")
    plt.plot(df_rdd_left["month"], a1)
    plt.plot(df_rdd_right["month"], a2)
    plt.ylim(0, 0.7)
    plt.xlim(-30, 20)
    plt.xlabel("Month of birth (0=July 2007)")
    plt.axvline(x=0, color="k")
    plt.title("Fraction with positive daycare expenditure by month of birth")
    plt.grid(True)
    plt.suptitle(
        "Daycare Expenditure by Month of Birth (HBS 2008)",
        verticalalignment="baseline",
        fontsize=14,
    )
    return
def create_fertility_figure():
    """Plot the fertility effect: conceptions and abortions by month.

    Left panel: number of conceptions by conception month (from ``df1``);
    right panel: log number of abortions by abortion month (from ``df2``).
    Linear OLS fits are drawn on each side of the July 2007 cutoff.
    ``df1``/``df2`` come from the module-level wildcard imports --
    column names assumed, TODO confirm against pre_process_datasets.
    """
    df1_avg = df1.groupby(["conception_month"]).mean()
    # Turn month back to a column from index
    df1_avg.reset_index(inplace=True)
    df2_avg = df2.groupby(["abortion_month"]).mean()
    # Turn month back to a column from index
    df2_avg.reset_index(inplace=True)
    plt.figure(figsize=(13, 4))
    # --- Left panel: conceptions ---
    plt.subplot(1, 2, 1)
    # RDD windows: 30 months before / 19 months after the cutoff.
    df_rdd_left = df1_avg.loc[
        (df1_avg["conception_month"] < 0) & (df1_avg["conception_month"] > -31)
    ]
    df_rdd_right = df1_avg.loc[
        (df1_avg["conception_month"] > 0) & (df1_avg["conception_month"] < 20)
    ]
    res = smf.ols(
        formula="number_conceptions ~ conception_month", data=df_rdd_left
    ).fit()
    a1 = res.predict(df_rdd_left)
    res2 = smf.ols(
        formula="number_conceptions ~ conception_month", data=df_rdd_right
    ).fit()
    a2 = res2.predict(df_rdd_right)
    plt.scatter(
        df1_avg["conception_month"], df1_avg["number_conceptions"], edgecolors="r"
    )
    plt.plot(df_rdd_left["conception_month"], a1)
    plt.plot(df_rdd_right["conception_month"], a2)
    plt.ylim(30000, 50000)
    plt.xlim(-15, 15)
    plt.xlabel("Month of conception (0=July 2007)")
    plt.axvline(x=0, color="k")  # mark the policy cutoff
    plt.grid(True)
    plt.title("Number of conceptions by month")
    # --- Right panel: abortions (log scale variable) ---
    plt.subplot(1, 2, 2)
    df_rdd_left = df2_avg.loc[
        (df2_avg["abortion_month"] < 0) & (df2_avg["abortion_month"] > -31)
    ]
    df_rdd_right = df2_avg.loc[
        (df2_avg["abortion_month"] > 0) & (df2_avg["abortion_month"] < 20)
    ]
    res = smf.ols(
        formula="ln_number_abortions ~ abortion_month", data=df_rdd_left
    ).fit()
    a1 = res.predict(df_rdd_left)
    res2 = smf.ols(
        formula="ln_number_abortions ~ abortion_month", data=df_rdd_right
    ).fit()
    a2 = res2.predict(df_rdd_right)
    plt.scatter(
        df2_avg["abortion_month"], df2_avg["ln_number_abortions"], edgecolors="r"
    )
    plt.plot(df_rdd_left["abortion_month"], a1)
    plt.plot(df_rdd_right["abortion_month"], a2)
    # plt.ylim(10000, 22000)
    plt.xlim(-30, 20)
    plt.xlabel("Month of abortion (0=July 2007)")
    plt.axvline(x=0, color="k")
    plt.title("Number of abortions by month")
    plt.grid(True)
    plt.suptitle(
        "Fertility Effect: Conceptions and Abortions by month",
        verticalalignment="baseline",
        fontsize=14,
    )
    return
| 29.764957
| 88
| 0.634027
| 1,050
| 6,965
| 3.977143
| 0.132381
| 0.069444
| 0.0625
| 0.034483
| 0.824713
| 0.793582
| 0.779454
| 0.744971
| 0.726533
| 0.711207
| 0
| 0.050136
| 0.20962
| 6,965
| 233
| 89
| 29.892704
| 0.708447
| 0.051113
| 0
| 0.553672
| 1
| 0
| 0.203636
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016949
| false
| 0
| 0.033898
| 0
| 0.067797
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
aaeeec2348acda5a46d054130f43125e1fffb71a
| 3,499
|
py
|
Python
|
tests/test_class_oelint_vars_srcurireqopt.py
|
skycaptain/oelint-adv
|
ff67d3149cf8b1de2b0b2d158a68f4e2cf5e9e46
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_class_oelint_vars_srcurireqopt.py
|
skycaptain/oelint-adv
|
ff67d3149cf8b1de2b0b2d158a68f4e2cf5e9e46
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_class_oelint_vars_srcurireqopt.py
|
skycaptain/oelint-adv
|
ff67d3149cf8b1de2b0b2d158a68f4e2cf5e9e46
|
[
"BSD-2-Clause"
] | null | null | null |
import pytest
from .base import TestBaseClass
class TestClassOelintVarsSRCURIReqOpt(TestBaseClass):
    """Tests for the oelint.vars.srcurioptions rule.

    The rule checks that git/gitsm SRC_URI entries carry the required
    options.  From the cases below: an entry needs both a 'protocol' and
    a branch option ('branch=...' or 'nobranch=1'); each missing option
    appears to count as one finding -- confirm against the rule source.
    """

    # One required option missing per entry -> one finding each.
    @pytest.mark.parametrize('id', ['oelint.vars.srcurioptions'])
    @pytest.mark.parametrize('occurrence', [1])
    @pytest.mark.parametrize('input',
                             [
                                 {
                                     'oelint_adv_test.bb':
                                     'SRC_URI += "git://foo.org/gaz.git;name=foo;branch=master"',
                                 },
                                 {
                                     'oelint_adv_test.bb':
                                     'SRC_URI += "gitsm://foo.org/gaz.git;name=foo;branch=master"',
                                 },
                                 {
                                     'oelint_adv_test.bb':
                                     'SRC_URI += "git://foo.org/gaz.git;name=foo;protocol=ssh"',
                                 },
                                 {
                                     'oelint_adv_test.bb':
                                     'SRC_URI += "gitsm://foo.org/gaz.git;name=foo;protocol=ssh"',
                                 },
                             ],
                             )
    def test_bad(self, input, id, occurrence):
        self.check_for_id(self._create_args(input), id, occurrence)

    # Both required options missing -> two findings per entry.
    @pytest.mark.parametrize('id', ['oelint.vars.srcurioptions'])
    @pytest.mark.parametrize('occurrence', [2])
    @pytest.mark.parametrize('input',
                             [
                                 {
                                     'oelint_adv_test.bb':
                                     'SRC_URI += "git://foo.org/gaz.git;name=foo"',
                                 },
                                 {
                                     'oelint_adv_test.bb':
                                     'SRC_URI += "gitsm://foo.org/gaz.git;name=foo"',
                                 },
                             ],
                             )
    def test_really_bad(self, input, id, occurrence):
        self.check_for_id(self._create_args(input), id, occurrence)

    # All required options present -> no findings expected.
    @pytest.mark.parametrize('id', ['oelint.vars.srcurioptions'])
    @pytest.mark.parametrize('occurrence', [0])
    @pytest.mark.parametrize('input',
                             [
                                 {
                                     'oelint_adv_test.bb':
                                     'SRC_URI += "git://foo.org/gaz.git;name=foo;protocol=ssh;branch=main"',
                                 },
                                 {
                                     'oelint_adv_test.bb':
                                     'SRC_URI += "gitsm://foo.org/gaz.git;name=foo;protocol=ssh;branch=main"',
                                 },
                                 {
                                     'oelint_adv_test.bb':
                                     'SRC_URI += "git://foo.org/gaz.git;name=foo;protocol=ssh;nobranch=1"',
                                 },
                                 {
                                     'oelint_adv_test.bb':
                                     'SRC_URI += "gitsm://foo.org/gaz.git;name=foo;protocol=ssh;nobranch=1"',
                                 },
                             ],
                             )
    def test_good(self, input, id, occurrence):
        self.check_for_id(self._create_args(input), id, occurrence)
| 47.283784
| 110
| 0.356959
| 263
| 3,499
| 4.574144
| 0.1673
| 0.074813
| 0.108063
| 0.124688
| 0.898587
| 0.898587
| 0.898587
| 0.898587
| 0.898587
| 0.898587
| 0
| 0.003041
| 0.530151
| 3,499
| 73
| 111
| 47.931507
| 0.72871
| 0
| 0
| 0.328358
| 0
| 0.119403
| 0.256645
| 0.159188
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044776
| false
| 0
| 0.029851
| 0
| 0.089552
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c90d96e1e22f9db6bb7aa0c259d156bcfe3819e2
| 2,482
|
py
|
Python
|
morepath/tests/test_view_directive.py
|
DuncanBetts/morepath
|
acad10489b051df9c512f6735a9338854745a599
|
[
"BSD-3-Clause"
] | null | null | null |
morepath/tests/test_view_directive.py
|
DuncanBetts/morepath
|
acad10489b051df9c512f6735a9338854745a599
|
[
"BSD-3-Clause"
] | null | null | null |
morepath/tests/test_view_directive.py
|
DuncanBetts/morepath
|
acad10489b051df9c512f6735a9338854745a599
|
[
"BSD-3-Clause"
] | null | null | null |
import morepath
from dectate import ConflictError
from webtest import TestApp as Client
from reg import ClassIndex, KeyIndex
import pytest
from morepath.core import request_method_predicate
def test_view_get_only():
    """A plain @App.view answers GET but rejects POST with 405."""
    class App(morepath.App):
        pass

    @App.path(path='')
    class Model(object):
        def __init__(self):
            pass

    @App.view(model=Model)
    def default(self, request):
        return "View"

    c = Client(App())
    response = c.get('/')
    assert response.body == b'View'
    # POST is not registered for this view -> 405 Method Not Allowed.
    response = c.post('/', status=405)
def test_view_name_conflict_involving_default():
    """Registering a view with name='' conflicts with the implicit
    default-named view for the same model, raising at commit time."""
    class App(morepath.App):
        pass

    @App.path(path='')
    class Model(object):
        def __init__(self):
            pass

    @App.view(model=Model)
    def default(self, request):
        return "View"

    # name='' is the same as the implicit default name -> conflict.
    @App.view(model=Model, name='')
    def default2(self, request):
        return "View"

    with pytest.raises(ConflictError):
        App.commit()
def test_view_custom_predicate_conflict_involving_default_extends():
    """A view registered with extra='DEFAULT' conflicts with the plain
    registration, since omitting the custom predicate falls back to its
    default value -- even when the predicate is defined on a base app."""
    class Core(morepath.App):
        pass

    class App(Core):
        pass

    # Custom view predicate 'extra' defined on the base app (Core).
    @Core.predicate(morepath.App.get_view, name='extra', default='DEFAULT',
                    index=ClassIndex,
                    after=request_method_predicate)
    def dummy_predicate(request):
        return None

    @App.path(path='')
    class Model(object):
        def __init__(self):
            pass

    @App.view(model=Model)
    def default(self, request):
        return "View"

    # Explicit extra='DEFAULT' equals the implicit default -> conflict.
    @App.view(model=Model, extra='DEFAULT')
    def default2(self, request):
        return "View"

    with pytest.raises(ConflictError):
        App.commit()
def test_view_custom_predicate_without_fallback():
    """A custom predicate routes requests: the matching view is served,
    and a non-matching registration yields 404 (no fallback view)."""
    class Core(morepath.App):
        pass

    class App(Core):
        pass

    # Custom view predicate 'extra'; always evaluates to 'match'.
    @Core.predicate(morepath.App.get_view, name='extra', default='DEFAULT',
                    index=KeyIndex,
                    after=request_method_predicate)
    def dummy_predicate(self, obj, request):
        return 'match'

    @App.path(path='')
    class Model(object):
        def __init__(self):
            pass

    @App.view(model=Model, extra='match')
    def default(self, request):
        return "View"

    # Registered under extra='not match', which never fires -> 404 below.
    @App.view(model=Model, name='foo', extra='not match')
    def not_match(self, request):
        return "Not match"

    c = Client(App())
    response = c.get('/')
    assert response.body == b'View'
    c.get('/foo', status=404)
| 22.160714
| 75
| 0.611604
| 294
| 2,482
| 5.003401
| 0.207483
| 0.079538
| 0.057104
| 0.080897
| 0.709041
| 0.702243
| 0.702243
| 0.64242
| 0.64242
| 0.64242
| 0
| 0.00441
| 0.269138
| 2,482
| 111
| 76
| 22.36036
| 0.806505
| 0
| 0
| 0.703704
| 0
| 0
| 0.040693
| 0
| 0
| 0
| 0
| 0
| 0.024691
| 1
| 0.209877
| false
| 0.123457
| 0.074074
| 0.111111
| 0.518519
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
|
0
| 7
|
c95714ed2d73b92d1af2c3285210b7418503710e
| 15,087
|
py
|
Python
|
tests/persistence/test_progress.py
|
pawelkopka/kopf
|
51a3a70e09a17cf3baec2946b64b125a90595cf4
|
[
"MIT"
] | null | null | null |
tests/persistence/test_progress.py
|
pawelkopka/kopf
|
51a3a70e09a17cf3baec2946b64b125a90595cf4
|
[
"MIT"
] | null | null | null |
tests/persistence/test_progress.py
|
pawelkopka/kopf
|
51a3a70e09a17cf3baec2946b64b125a90595cf4
|
[
"MIT"
] | null | null | null |
import copy
import datetime
from unittest.mock import Mock
import freezegun
import pytest
from kopf.reactor.states import HandlerOutcome, State, deliver_results
# Timestamps: time zero (0), before (B), after (A), and time zero+1s (1).
# Tests freeze "now" at TS0; TSB/TSA differ from TS0 only in microseconds.
TSB = datetime.datetime(2020, 12, 31, 23, 59, 59, 000000)
TS0 = datetime.datetime(2020, 12, 31, 23, 59, 59, 123456)
TS1 = datetime.datetime(2021, 1, 1, 00, 00, 00, 123456)
TSA = datetime.datetime(2020, 12, 31, 23, 59, 59, 999999)
# The same instants as ISO-8601 strings, as they appear in the k8s status.
TSB_ISO = '2020-12-31T23:59:59.000000'
TS0_ISO = '2020-12-31T23:59:59.123456'
TS1_ISO = '2021-01-01T00:00:00.123456'
TSA_ISO = '2020-12-31T23:59:59.999999'
# Zero runtime: expected when 'started' is absent or equals the frozen "now".
ZERO_DELTA = datetime.timedelta(seconds=0)
@pytest.fixture()
def handler():
    """A minimal handler stub: only its ``id`` attribute may be touched."""
    stub = Mock(spec_set=['id'], id='some-id')
    return stub
@freezegun.freeze_time(TS0)
def test_always_started_when_created_from_scratch(handler):
    """A brand-new state stamps 'started' with the frozen current time."""
    patch = {}
    state = State.from_scratch(handlers=[handler])
    state.store(patch=patch)
    assert patch['status']['kopf']['progress']['some-id']['started'] == TS0_ISO
@pytest.mark.parametrize('expected, body', [
    (TS0_ISO, {}),
    (TS0_ISO, {'status': {}}),
    (TS0_ISO, {'status': {'kopf': {}}}),
    (TS0_ISO, {'status': {'kopf': {'progress': {}}}}),
    (TS0_ISO, {'status': {'kopf': {'progress': {'some-id': {}}}}}),
    (TS0_ISO, {'status': {'kopf': {'progress': {'some-id': {'started': None}}}}}),
    (TS0_ISO, {'status': {'kopf': {'progress': {'some-id': {'started': TS0_ISO}}}}}),
    (TSB_ISO, {'status': {'kopf': {'progress': {'some-id': {'started': TSB_ISO}}}}}),
    (TSA_ISO, {'status': {'kopf': {'progress': {'some-id': {'started': TSA_ISO}}}}}),
])
@freezegun.freeze_time(TS0)
def test_always_started_when_created_from_body(handler, body, expected):
    """'started' is kept from the body when present, else set to "now"."""
    origbody = copy.deepcopy(body)
    patch = {}
    state = State.from_body(body=body, handlers=[handler])
    state.store(patch=patch)
    assert patch['status']['kopf']['progress']['some-id']['started'] == expected
    assert body == origbody  # not modified
@pytest.mark.parametrize('expected, body', [
    (ZERO_DELTA, {}),
    (ZERO_DELTA, {'status': {}}),
    (ZERO_DELTA, {'status': {'kopf': {}}}),
    (ZERO_DELTA, {'status': {'kopf': {'progress': {}}}}),
    (ZERO_DELTA, {'status': {'kopf': {'progress': {'some-id': {}}}}}),
    (ZERO_DELTA, {'status': {'kopf': {'progress': {'some-id': {'started': None}}}}}),
    (ZERO_DELTA, {'status': {'kopf': {'progress': {'some-id': {'started': TS0_ISO}}}}}),
    (TS0 - TSB, {'status': {'kopf': {'progress': {'some-id': {'started': TSB_ISO}}}}}),
    (TS0 - TSA, {'status': {'kopf': {'progress': {'some-id': {'started': TSA_ISO}}}}}),
])
@freezegun.freeze_time(TS0)
def test_runtime(handler, expected, body):
    """runtime == "now" minus 'started' (zero when unset or equal to "now")."""
    origbody = copy.deepcopy(body)
    state = State.from_body(body=body, handlers=[handler])
    result = state[handler.id].runtime
    assert result == expected
    assert body == origbody  # not modified
@pytest.mark.parametrize('expected, body', [
    (False, {}),
    (False, {'status': {}}),
    (False, {'status': {'kopf': {}}}),
    (False, {'status': {'kopf': {'progress': {}}}}),
    (False, {'status': {'kopf': {'progress': {'some-id': {}}}}}),
    (False, {'status': {'kopf': {'progress': {'some-id': {'success': False}}}}}),
    (False, {'status': {'kopf': {'progress': {'some-id': {'failure': False}}}}}),
    (False, {'status': {'kopf': {'progress': {'some-id': {'success': None}}}}}),
    (False, {'status': {'kopf': {'progress': {'some-id': {'failure': None}}}}}),
    (True , {'status': {'kopf': {'progress': {'some-id': {'success': True}}}}}),
    (True , {'status': {'kopf': {'progress': {'some-id': {'failure': True}}}}}),
])
def test_finished_flag(handler, expected, body):
    """finished is True only when either success or failure is True."""
    origbody = copy.deepcopy(body)
    state = State.from_body(body=body, handlers=[handler])
    result = state[handler.id].finished
    assert result == expected
    assert body == origbody  # not modified
@pytest.mark.parametrize('expected, body', [
    # Everything that is finished is not sleeping, no matter the sleep/awake field.
    (False, {'status': {'kopf': {'progress': {'some-id': {'success': True}}}}}),
    (False, {'status': {'kopf': {'progress': {'some-id': {'failure': True}}}}}),
    (False, {'status': {'kopf': {'progress': {'some-id': {'success': True, 'delayed': TS0_ISO}}}}}),
    (False, {'status': {'kopf': {'progress': {'some-id': {'failure': True, 'delayed': TS0_ISO}}}}}),
    # Everything with no sleep/awake field set is not sleeping either.
    (False, {'status': {'kopf': {'progress': {'some-id': {}}}}}),
    (False, {'status': {'kopf': {'progress': {'some-id': {'delayed': None}}}}}),
    (False, {'status': {'kopf': {'progress': {'some-id': {'success': None}}}}}),
    (False, {'status': {'kopf': {'progress': {'some-id': {'failure': None}}}}}),
    (False, {'status': {'kopf': {'progress': {'some-id': {'success': None, 'delayed': None}}}}}),
    (False, {'status': {'kopf': {'progress': {'some-id': {'failure': None, 'delayed': None}}}}}),
    # When not finished and has awake time, the output depends on the relation to "now".
    (False, {'status': {'kopf': {'progress': {'some-id': {'delayed': TS0_ISO}}}}}),
    (False, {'status': {'kopf': {'progress': {'some-id': {'delayed': TS0_ISO, 'success': None}}}}}),
    (False, {'status': {'kopf': {'progress': {'some-id': {'delayed': TS0_ISO, 'failure': None}}}}}),
    (False, {'status': {'kopf': {'progress': {'some-id': {'delayed': TSB_ISO}}}}}),
    (False, {'status': {'kopf': {'progress': {'some-id': {'delayed': TSB_ISO, 'success': None}}}}}),
    (False, {'status': {'kopf': {'progress': {'some-id': {'delayed': TSB_ISO, 'failure': None}}}}}),
    (True , {'status': {'kopf': {'progress': {'some-id': {'delayed': TSA_ISO}}}}}),
    (True , {'status': {'kopf': {'progress': {'some-id': {'delayed': TSA_ISO, 'success': None}}}}}),
    (True , {'status': {'kopf': {'progress': {'some-id': {'delayed': TSA_ISO, 'failure': None}}}}}),
])
@freezegun.freeze_time(TS0)
def test_sleeping_flag(handler, expected, body):
    """sleeping is True only when unfinished and delayed strictly past "now"."""
    origbody = copy.deepcopy(body)
    state = State.from_body(body=body, handlers=[handler])
    result = state[handler.id].sleeping
    assert result == expected
    assert body == origbody  # not modified
@pytest.mark.parametrize('expected, body', [
    # Everything that is finished never awakens, no matter the sleep/awake field.
    (False, {'status': {'kopf': {'progress': {'some-id': {'success': True}}}}}),
    (False, {'status': {'kopf': {'progress': {'some-id': {'failure': True}}}}}),
    (False, {'status': {'kopf': {'progress': {'some-id': {'success': True, 'delayed': TS0_ISO}}}}}),
    (False, {'status': {'kopf': {'progress': {'some-id': {'failure': True, 'delayed': TS0_ISO}}}}}),
    # Everything with no sleep/awake field is not sleeping, thus by definition is awake.
    (True , {'status': {'kopf': {'progress': {'some-id': {}}}}}),
    (True , {'status': {'kopf': {'progress': {'some-id': {'delayed': None}}}}}),
    (True , {'status': {'kopf': {'progress': {'some-id': {'success': None}}}}}),
    (True , {'status': {'kopf': {'progress': {'some-id': {'failure': None}}}}}),
    (True , {'status': {'kopf': {'progress': {'some-id': {'success': None, 'delayed': None}}}}}),
    (True , {'status': {'kopf': {'progress': {'some-id': {'failure': None, 'delayed': None}}}}}),
    # When not finished and has awake time, the output depends on the relation to "now".
    (True , {'status': {'kopf': {'progress': {'some-id': {'delayed': TS0_ISO}}}}}),
    (True , {'status': {'kopf': {'progress': {'some-id': {'delayed': TS0_ISO, 'success': None}}}}}),
    (True , {'status': {'kopf': {'progress': {'some-id': {'delayed': TS0_ISO, 'failure': None}}}}}),
    (True , {'status': {'kopf': {'progress': {'some-id': {'delayed': TSB_ISO}}}}}),
    (True , {'status': {'kopf': {'progress': {'some-id': {'delayed': TSB_ISO, 'success': None}}}}}),
    (True , {'status': {'kopf': {'progress': {'some-id': {'delayed': TSB_ISO, 'failure': None}}}}}),
    (False, {'status': {'kopf': {'progress': {'some-id': {'delayed': TSA_ISO}}}}}),
    (False, {'status': {'kopf': {'progress': {'some-id': {'delayed': TSA_ISO, 'success': None}}}}}),
    (False, {'status': {'kopf': {'progress': {'some-id': {'delayed': TSA_ISO, 'failure': None}}}}}),
])
@freezegun.freeze_time(TS0)
def test_awakened_flag(handler, expected, body):
    """awakened is the exact complement of sleeping for unfinished handlers."""
    origbody = copy.deepcopy(body)
    state = State.from_body(body=body, handlers=[handler])
    result = state[handler.id].awakened
    assert result == expected
    assert body == origbody  # not modified
@pytest.mark.parametrize('expected, body', [
    (None, {}),
    (None, {'status': {}}),
    (None, {'status': {'kopf': {}}}),
    (None, {'status': {'kopf': {'progress': {}}}}),
    (None, {'status': {'kopf': {'progress': {'some-id': {}}}}}),
    (None, {'status': {'kopf': {'progress': {'some-id': {'delayed': None}}}}}),
    (TS0, {'status': {'kopf': {'progress': {'some-id': {'delayed': TS0_ISO}}}}}),
])
def test_awakening_time(handler, expected, body):
    """delayed parses the ISO string into a datetime, or None when absent."""
    origbody = copy.deepcopy(body)
    state = State.from_body(body=body, handlers=[handler])
    result = state[handler.id].delayed
    assert result == expected
    assert body == origbody  # not modified
@pytest.mark.parametrize('expected, body', [
    (0, {}),
    (0, {'status': {}}),
    (0, {'status': {'kopf': {'progress': {}}}}),
    (0, {'status': {'kopf': {'progress': {'some-id': {}}}}}),
    (0, {'status': {'kopf': {'progress': {'some-id': {'retries': None}}}}}),
    (6, {'status': {'kopf': {'progress': {'some-id': {'retries': 6}}}}}),
])
def test_get_retry_count(handler, expected, body):
    """retries reads the stored counter, defaulting to 0 when absent/None."""
    origbody = copy.deepcopy(body)
    state = State.from_body(body=body, handlers=[handler])
    result = state[handler.id].retries
    assert result == expected
    assert body == origbody  # not modified
@pytest.mark.parametrize('body, delay, expected', [
    ({}, None, None),
    ({}, 0, TS0_ISO),
    ({}, 1, TS1_ISO),
])
@freezegun.freeze_time(TS0)
def test_set_awake_time(handler, expected, body, delay):
    """A non-final outcome with a delay stores 'delayed' = now + delay."""
    origbody = copy.deepcopy(body)
    patch = {}
    state = State.from_body(body=body, handlers=[handler])
    state = state.with_outcomes(outcomes={handler.id: HandlerOutcome(final=False, delay=delay)})
    state.store(patch=patch)
    # .get() because 'delayed' may legitimately be absent when delay is None.
    assert patch['status']['kopf']['progress']['some-id'].get('delayed') == expected
    assert body == origbody  # not modified
@pytest.mark.parametrize('expected_retries, expected_delayed, delay, body', [
    (1, None, None, {}),
    (1, TS0_ISO, 0, {}),
    (1, TS1_ISO, 1, {}),
    (1, None, None, {'status': {'kopf': {'progress': {'some-id': {'retries': None}}}}}),
    (1, TS0_ISO, 0, {'status': {'kopf': {'progress': {'some-id': {'retries': None}}}}}),
    (1, TS1_ISO, 1, {'status': {'kopf': {'progress': {'some-id': {'retries': None}}}}}),
    (6, None, None, {'status': {'kopf': {'progress': {'some-id': {'retries': 5}}}}}),
    (6, TS0_ISO, 0, {'status': {'kopf': {'progress': {'some-id': {'retries': 5}}}}}),
    (6, TS1_ISO, 1, {'status': {'kopf': {'progress': {'some-id': {'retries': 5}}}}}),
])
@freezegun.freeze_time(TS0)
def test_set_retry_time(handler, expected_retries, expected_delayed, body, delay):
    """A non-final outcome increments 'retries' and stores the new 'delayed'."""
    origbody = copy.deepcopy(body)
    patch = {}
    state = State.from_body(body=body, handlers=[handler])
    state = state.with_outcomes(outcomes={handler.id: HandlerOutcome(final=False, delay=delay)})
    state.store(patch=patch)
    assert patch['status']['kopf']['progress']['some-id']['retries'] == expected_retries
    assert patch['status']['kopf']['progress']['some-id']['delayed'] == expected_delayed
    assert body == origbody  # not modified
@pytest.mark.parametrize('expected_retries, expected_stopped, body', [
    (1, TS0_ISO, {}),
    (6, TS0_ISO, {'status': {'kopf': {'progress': {'some-id': {'retries': 5}}}}}),
])
@freezegun.freeze_time(TS0)
def test_store_failure(handler, expected_retries, expected_stopped, body):
    """A final outcome with an exception marks failure, stop time, and message."""
    error = Exception('some-error')
    origbody = copy.deepcopy(body)
    patch = {}
    state = State.from_body(body=body, handlers=[handler])
    state = state.with_outcomes(outcomes={handler.id: HandlerOutcome(final=True, exception=error)})
    state.store(patch=patch)
    assert patch['status']['kopf']['progress']['some-id']['success'] is False
    assert patch['status']['kopf']['progress']['some-id']['failure'] is True
    assert patch['status']['kopf']['progress']['some-id']['retries'] == expected_retries
    assert patch['status']['kopf']['progress']['some-id']['stopped'] == expected_stopped
    assert patch['status']['kopf']['progress']['some-id']['message'] == 'some-error'
    assert body == origbody  # not modified
@pytest.mark.parametrize('expected_retries, expected_stopped, body', [
    (1, TS0_ISO, {}),
    (6, TS0_ISO, {'status': {'kopf': {'progress': {'some-id': {'retries': 5}}}}}),
])
@freezegun.freeze_time(TS0)
def test_store_success(handler, expected_retries, expected_stopped, body):
    """A final outcome without an exception marks success with no message."""
    origbody = copy.deepcopy(body)
    patch = {}
    state = State.from_body(body=body, handlers=[handler])
    state = state.with_outcomes(outcomes={handler.id: HandlerOutcome(final=True)})
    state.store(patch=patch)
    assert patch['status']['kopf']['progress']['some-id']['success'] is True
    assert patch['status']['kopf']['progress']['some-id']['failure'] is False
    assert patch['status']['kopf']['progress']['some-id']['retries'] == expected_retries
    assert patch['status']['kopf']['progress']['some-id']['stopped'] == expected_stopped
    assert patch['status']['kopf']['progress']['some-id']['message'] is None
    assert body == origbody  # not modified
@pytest.mark.parametrize('result, expected_patch', [
    (None, {}),
    ('string', {'status': {'some-id': 'string'}}),
    ({'field': 'value'}, {'status': {'some-id': {'field': 'value'}}}),
])
def test_store_result(handler, expected_patch, result):
    """Handler results land in the patch under the handler's id (None -> nothing)."""
    patch = {}
    deliver_results(outcomes={handler.id: HandlerOutcome(final=True, result=result)},
                    patch=patch)
    assert patch == expected_patch
def test_purge_progress_when_exists_in_body(handler):
    """Purging an existing progress section nulls it out in the patch."""
    body = {'status': {'kopf': {'progress': {'some-id': {'retries': 5}}}}}
    snapshot = copy.deepcopy(body)
    patch = {}
    state = State.from_body(body=body, handlers=[handler])
    state.purge(patch=patch, body=body)
    assert patch == {'status': {'kopf': {'progress': None}}}
    assert body == snapshot  # the input body is never modified
def test_purge_progress_when_already_empty_in_body_and_patch(handler):
    """Purging with nothing stored anywhere leaves the patch empty."""
    body = {}
    snapshot = copy.deepcopy(body)
    patch = {}
    state = State.from_body(body=body, handlers=[handler])
    state.purge(patch=patch, body=body)
    assert not patch
    assert body == snapshot  # the input body is never modified
def test_purge_progress_when_already_empty_in_body_but_not_in__patch(handler):
    """Purging removes even pre-existing progress entries from the patch."""
    body = {}
    patch = {'status': {'kopf': {'progress': {'some-id': {'retries': 5}}}}}
    snapshot = copy.deepcopy(body)
    state = State.from_body(body=body, handlers=[handler])
    state.purge(patch=patch, body=body)
    assert not patch
    assert body == snapshot  # the input body is never modified
| 46.854037
| 100
| 0.602837
| 1,779
| 15,087
| 5.012367
| 0.074761
| 0.10766
| 0.185713
| 0.212179
| 0.845576
| 0.829539
| 0.79511
| 0.756645
| 0.69261
| 0.640686
| 0
| 0.019169
| 0.1494
| 15,087
| 321
| 101
| 47
| 0.67568
| 0.047789
| 0
| 0.472119
| 0
| 0
| 0.24261
| 0.00725
| 0
| 0
| 0
| 0
| 0.144981
| 1
| 0.063197
| false
| 0
| 0.022305
| 0.003717
| 0.089219
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c979b90c8ac7db83b2a97f3715cb17a9cd239cc2
| 349
|
py
|
Python
|
test/test_beams.py
|
ebisim/ebisim
|
7197767a4d69fa1f7d5f0582eaf8f35c30f0b1f3
|
[
"MIT"
] | 2
|
2021-03-11T11:01:18.000Z
|
2021-03-12T11:58:20.000Z
|
test/test_beams.py
|
ebisim/ebisim
|
7197767a4d69fa1f7d5f0582eaf8f35c30f0b1f3
|
[
"MIT"
] | 14
|
2019-06-03T14:56:55.000Z
|
2021-07-21T20:01:27.000Z
|
test/test_beams.py
|
HPLegion/EBISSimulation
|
7197767a4d69fa1f7d5f0582eaf8f35c30f0b1f3
|
[
"MIT"
] | 1
|
2019-03-13T13:13:05.000Z
|
2019-03-13T13:13:05.000Z
|
"""
Tests for ebisim.beams
"""
#TODO: extend test suite
import ebisim.beams
import pytest
def test_electron_velocity():
    """Spot-check electron_velocity against precomputed values (m/s vs eV)."""
    cases = (
        (3e3, 3.234302e7),
        (3e4, 9.84447e7),
        (3e5, 2.327965e8),
    )
    for e_kin, velocity in cases:
        assert ebisim.beams.electron_velocity(e_kin) == pytest.approx(velocity)
| 26.846154
| 75
| 0.759312
| 47
| 349
| 5.531915
| 0.510638
| 0.211538
| 0.196154
| 0.288462
| 0.380769
| 0
| 0
| 0
| 0
| 0
| 0
| 0.094156
| 0.117479
| 349
| 12
| 76
| 29.083333
| 0.75
| 0.131805
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 0.5
| 1
| 0.166667
| true
| 0
| 0.333333
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
a34cd1ef8e8d3ce6e617e18be26b8678e5e15f39
| 1,813
|
py
|
Python
|
llorens/day06/Day06Solver.py
|
CloudCoders/AdventOfCode2017
|
5a52d1e89076eccb55686e4af5848de289309813
|
[
"MIT"
] | 8
|
2017-12-11T18:22:52.000Z
|
2017-12-13T00:50:24.000Z
|
llorens/day06/Day06Solver.py
|
CloudCoders/AdventOfCode2017
|
5a52d1e89076eccb55686e4af5848de289309813
|
[
"MIT"
] | 8
|
2017-12-01T14:31:29.000Z
|
2017-12-07T21:43:43.000Z
|
llorens/day06/Day06Solver.py
|
CloudCoders/AdventOfCode2017
|
5a52d1e89076eccb55686e4af5848de289309813
|
[
"MIT"
] | null | null | null |
def solve_part_1(sequence):
    """Return how many redistribution cycles occur before a repeat (AoC 2017 day 6).

    Args:
        sequence: tab-separated string of memory-bank block counts.

    Returns:
        The number of redistribution steps performed when a previously seen
        configuration is produced again.
    """
    banks = [int(block) for block in sequence.split("\t")]
    size = len(banks)
    # Fixed: membership used to be an O(steps*size) scan over a list of lists
    # (and an unused `block_redistribution` variable); a set of immutable
    # tuple snapshots gives O(1) lookups with identical results.
    seen = {tuple(banks)}
    steps = 0
    while True:
        # Pick the first bank holding the maximum number of blocks.
        index = banks.index(max(banks))
        blocks, banks[index] = banks[index], 0
        base, extra = divmod(blocks, size)
        # Every bank gets `base`; the first `extra` banks after `index` get one more.
        for offset in range(1, size + 1):
            banks[(index + offset) % size] += base + (1 if offset <= extra else 0)
        steps += 1
        snapshot = tuple(banks)
        if snapshot in seen:
            return steps
        seen.add(snapshot)
def solve_part_2(sequence):
    """Return the length of the redistribution loop (AoC 2017 day 6, part 2).

    Args:
        sequence: tab-separated string of memory-bank block counts.

    Returns:
        The number of steps between the two occurrences of the first repeated
        configuration (i.e. the cycle length).
    """
    banks = [int(block) for block in sequence.split("\t")]
    size = len(banks)
    # Fixed: replaced the O(steps*size) list scan (plus `.index()` re-scan and
    # an unused `block_redistribution` variable) with a dict that maps each
    # configuration to the step at which it was first seen.
    first_seen = {tuple(banks): 0}
    steps = 0
    while True:
        # Pick the first bank holding the maximum number of blocks.
        index = banks.index(max(banks))
        blocks, banks[index] = banks[index], 0
        base, extra = divmod(blocks, size)
        # Every bank gets `base`; the first `extra` banks after `index` get one more.
        for offset in range(1, size + 1):
            banks[(index + offset) % size] += base + (1 if offset <= extra else 0)
        steps += 1
        snapshot = tuple(banks)
        if snapshot in first_seen:
            return steps - first_seen[snapshot]
        first_seen[snapshot] = steps
| 24.835616
| 69
| 0.53668
| 199
| 1,813
| 4.778894
| 0.145729
| 0.201893
| 0.037855
| 0.069401
| 0.895899
| 0.895899
| 0.895899
| 0.895899
| 0.895899
| 0.895899
| 0
| 0.013805
| 0.360728
| 1,813
| 73
| 70
| 24.835616
| 0.80673
| 0
| 0
| 0.923077
| 0
| 0
| 0.002205
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038462
| false
| 0
| 0
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a3562139dc764ad1b891a07cb37a974ee0ca4b21
| 4,576
|
py
|
Python
|
apps/strategy/testcases/integration/tests_outerstrategy.py
|
ycheng-aa/gated_launch_backend
|
cbb9e7e530ab28d5914276e9607ebfcf84be6433
|
[
"MIT"
] | null | null | null |
apps/strategy/testcases/integration/tests_outerstrategy.py
|
ycheng-aa/gated_launch_backend
|
cbb9e7e530ab28d5914276e9607ebfcf84be6433
|
[
"MIT"
] | null | null | null |
apps/strategy/testcases/integration/tests_outerstrategy.py
|
ycheng-aa/gated_launch_backend
|
cbb9e7e530ab28d5914276e9607ebfcf84be6433
|
[
"MIT"
] | null | null | null |
from rest_framework import status
from apps.common.tests import BaseTestCase
from apps.auth.models import User
from apps.app.models import App
class OuterStrategyTestCase(BaseTestCase):
    '''
    Unit tests for the outer gray-release (gated launch) strategy API:
    only the owning app's owner may read/modify/delete its strategies.
    '''

    def setUp(self):
        self.normal_user = User.objects.get(username='normal_user')
        self.admin_user = User.objects.get(username='admin_user')
        # App owner of app1.
        self.app1_owner_user = User.objects.get(username='app_owner_user')
        # App owner of app2.
        self.app2_owner_user = User.objects.get(username='test_app_owner')
        # app1
        self.app = App.objects.get(pk=1)
        # Initialize the request payload shared by the test cases.
        self.test_data = {"name": "unit test outerstrategy",
                          "appId": 1,
                          "allowUsers": "4,5,6",
                          "isCompatible": 0,
                          "frequency": 2}
        self.super_user = self.app1_owner_user
        self.for_list_url = 'outerStrategies-list'
        self.for_detail_url = 'outerStrategies-detail'

    # get test cases
    def test_normal_user_can_not_get_outerstrategy(self):
        self.do_test_user_get_permission(self.normal_user, status.HTTP_403_FORBIDDEN)

    def test_admin_user_can_not_get_outerstrategy(self):
        self.do_test_user_get_permission(self.admin_user, status.HTTP_403_FORBIDDEN)

    def test_app_owner_user_can_get_outerstrategy(self):
        self.do_test_user_get_permission(self.app1_owner_user, status.HTTP_200_OK)

    def test_other_app_user_can_not_get_outerstrategy(self):
        self.do_test_user_get_permission(self.app2_owner_user, status.HTTP_403_FORBIDDEN)

    # patch test cases
    def test_normal_user_can_not_patch_outerstrategy(self):
        self.do_test_user_patch_permission(self.normal_user, {"name": "modified unit test outerstrategy"}, status.HTTP_403_FORBIDDEN)

    # NOTE(review): the name says "can patch" but a 403 is expected --
    # rename candidate (kept as-is to avoid changing the test's identity).
    def test_admin_user_can_patch_outerstrategy(self):
        self.do_test_user_patch_permission(self.admin_user, {"name": "modified unit test outerstrategy"}, status.HTTP_403_FORBIDDEN)

    def test_app_owner_user_can_patch_outerstrategy(self):
        self.do_test_user_patch_permission(self.app1_owner_user, {"name": "modified unit test outerstrategy"}, status.HTTP_200_OK)

    def test_other_app_user_can_patch_outerstrategy(self):
        self.do_test_user_patch_permission(self.app2_owner_user, {"name": "modified unit test outerstrategy"}, status.HTTP_403_FORBIDDEN)

    # create test cases
    def test_normal_user_can_not_create_outerstrategy(self):
        self.do_test_user_create_permission(self.normal_user, status.HTTP_403_FORBIDDEN)

    def test_admin_user_can_not_create_outerstrategy(self):
        self.do_test_user_create_permission(self.admin_user, status.HTTP_403_FORBIDDEN)

    def test_app_owner_user_can_create_outerstrategy(self):
        self.do_test_user_create_permission(self.app1_owner_user, status.HTTP_200_OK)

    def test_other_app_owner_user_can_not_create_outerstrategy(self):
        self.do_test_user_create_permission(self.app2_owner_user, status.HTTP_403_FORBIDDEN)

    # delete test cases
    # NOTE(review): deletes always return HTTP 200; the permission outcome is
    # carried in the response body's 'msg' field, hence the extra_verify lambdas.
    def test_normarl_user_can_not_delete_outerstrategy(self):
        self.do_test_user_delete_permission(user=self.normal_user,
                                            expect_status=status.HTTP_200_OK,
                                            extra_verify=lambda res: self.assertEqual(res.data.get('msg', {}), '1: You do not have permission to perform this action.'))

    def test_admin_user_can_not_delete_outerstrategy(self):
        self.do_test_user_delete_permission(user=self.admin_user,
                                            expect_status=status.HTTP_200_OK,
                                            extra_verify=lambda res: self.assertEqual(res.data.get('msg', {}), '1: You do not have permission to perform this action.'))

    def test_app_owner_user_can_delete_outerstrategy(self):
        self.do_test_user_delete_permission(user=self.app1_owner_user,
                                            expect_status=status.HTTP_200_OK,
                                            extra_verify=lambda res: self.assertEqual(res.data.get('msg', {}), u'成功'))

    def test_other_app_owner_user_can_not_delete_outerstrategy(self):
        self.do_test_user_delete_permission(user=self.app2_owner_user,
                                            expect_status=status.HTTP_200_OK,
                                            extra_verify=lambda res: self.assertEqual(res.data.get('msg', {}), '1: You do not have permission to perform this action.'))
| 47.175258
| 168
| 0.693837
| 599
| 4,576
| 4.884808
| 0.138564
| 0.055366
| 0.114833
| 0.125769
| 0.810663
| 0.781613
| 0.753589
| 0.753589
| 0.71907
| 0.677375
| 0
| 0.020293
| 0.22465
| 4,576
| 96
| 169
| 47.666667
| 0.804397
| 0.027972
| 0
| 0.118644
| 0
| 0
| 0.107571
| 0.004972
| 0
| 0
| 0
| 0
| 0.067797
| 1
| 0.288136
| false
| 0
| 0.067797
| 0
| 0.372881
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a3819414b1c70303bff9c1ad2c2738e09382cf24
| 2,170
|
py
|
Python
|
reo/migrations/0144_auto_20211210_2306.py
|
NREL/REopt_API
|
fbc70f3b0cdeec9ee220266d6b3b0c5d64f257a6
|
[
"BSD-3-Clause"
] | 7
|
2022-01-29T12:10:10.000Z
|
2022-03-28T13:45:20.000Z
|
reo/migrations/0144_auto_20211210_2306.py
|
NREL/reopt_api
|
fbc70f3b0cdeec9ee220266d6b3b0c5d64f257a6
|
[
"BSD-3-Clause"
] | 12
|
2022-02-01T18:23:18.000Z
|
2022-03-31T17:22:17.000Z
|
reo/migrations/0144_auto_20211210_2306.py
|
NREL/REopt_API
|
fbc70f3b0cdeec9ee220266d6b3b0c5d64f257a6
|
[
"BSD-3-Clause"
] | 3
|
2022-02-08T19:44:40.000Z
|
2022-03-12T11:05:36.000Z
|
# Generated by Django 3.1.13 on 2021-12-10 23:06
from django.db import migrations
class Migration(migrations.Migration):
    """Rename the surviving ``year_one_*`` renewable-energy fields of
    ``SiteModel`` to their ``annual_*`` names, and drop the obsolete ones."""

    dependencies = [
        ('reo', '0143_auto_20211209_1922'),
    ]

    # Same operations as the auto-generated original, in the same order:
    # six RenameField ops followed by six RemoveField ops on 'sitemodel'.
    operations = [
        migrations.RenameField(model_name='sitemodel', old_name=old, new_name=new)
        for old, new in [
            ('year_one_renewable_electricity_kwh', 'annual_renewable_electricity_kwh'),
            ('year_one_renewable_electricity_kwh_bau', 'annual_renewable_electricity_kwh_bau'),
            ('year_one_renewable_electricity_pct', 'annual_renewable_electricity_pct'),
            ('year_one_renewable_electricity_pct_bau', 'annual_renewable_electricity_pct_bau'),
            ('year_one_renewable_heat_pct', 'annual_total_renewable_energy_pct'),
            ('year_one_renewable_heat_pct_bau', 'annual_total_renewable_energy_pct_bau'),
        ]
    ] + [
        migrations.RemoveField(model_name='sitemodel', name=field)
        for field in [
            'year_one_electricity_kwh',
            'year_one_electricity_kwh_bau',
            'year_one_heat_load_mmbtu',
            'year_one_heat_load_mmbtu_bau',
            'year_one_renewable_heat_mmbtu',
            'year_one_renewable_heat_mmbtu_bau',
        ]
    ]
| 31.911765
| 62
| 0.607373
| 211
| 2,170
| 5.763033
| 0.21327
| 0.088816
| 0.177632
| 0.131579
| 0.881579
| 0.879112
| 0.758224
| 0.70148
| 0.70148
| 0.694079
| 0
| 0.021178
| 0.303687
| 2,170
| 67
| 63
| 32.38806
| 0.783587
| 0.021198
| 0
| 0.590164
| 1
| 0
| 0.333648
| 0.281338
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.016393
| 0
| 0.065574
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6e62d838d0f01262da3f452262892e0938627cc4
| 40,852
|
py
|
Python
|
test_sort.py
|
imacube/sorting-pictures
|
0dad1b5c6ed43a2adb1e7be88f399d207ef95e4d
|
[
"MIT"
] | null | null | null |
test_sort.py
|
imacube/sorting-pictures
|
0dad1b5c6ed43a2adb1e7be88f399d207ef95e4d
|
[
"MIT"
] | null | null | null |
test_sort.py
|
imacube/sorting-pictures
|
0dad1b5c6ed43a2adb1e7be88f399d207ef95e4d
|
[
"MIT"
] | null | null | null |
"""Tests for sort.py."""
import shutil
from argparse import Namespace
from datetime import datetime
from pathlib import PosixPath, Path
from unittest.mock import patch, call
import pytest
from sort import SortingPictures
@pytest.fixture
def sorting_pictures():
    """A fresh SortingPictures instance for each test."""
    return SortingPictures()
@pytest.fixture
def namespace():
    """The Namespace that parse_arguments produces for plain 'src dest' args
    (all flags off); tests mutate single fields before comparing."""
    return Namespace(move=False, collisions=False, suffix=False, parse=False,
                     exif=False, google_json=False,
                     dry_run=False, paths='src dest'.split())
class TestParseArguments:
    """CLI parsing: each flag flips exactly one field of the default Namespace."""

    def test_basic(self, sorting_pictures, namespace):
        parser = sorting_pictures.parse_arguments()
        args = parser.parse_args('src dest'.split())
        assert args == namespace

    def test_move(self, sorting_pictures, namespace):
        parser = sorting_pictures.parse_arguments()
        args = parser.parse_args('--move src dest'.split())
        namespace.move = True
        assert args == namespace

    def test_many_sources(self, sorting_pictures, namespace):
        parser = sorting_pictures.parse_arguments()
        args = parser.parse_args('src0 src1 src2 src3 dest'.split())
        namespace.paths = 'src0 src1 src2 src3 dest'.split()
        # Fixed: compare against the updated shared fixture instead of
        # rebuilding an identical Namespace literal inline.
        assert args == namespace

    def test_collisions(self, sorting_pictures, namespace):
        parser = sorting_pictures.parse_arguments()
        args = parser.parse_args('--collisions src dest'.split())
        namespace.collisions = True
        assert args == namespace

    def test_suffix(self, sorting_pictures, namespace):
        parser = sorting_pictures.parse_arguments()
        args = parser.parse_args('--suffix src dest'.split())
        namespace.suffix = True
        assert args == namespace

    def test_exif(self, sorting_pictures, namespace):
        parser = sorting_pictures.parse_arguments()
        args = parser.parse_args('--exif src dest'.split())
        namespace.exif = True
        assert args == namespace

    def test_google_json(self, sorting_pictures, namespace):
        parser = sorting_pictures.parse_arguments()
        args = parser.parse_args('--google-json src dest'.split())
        namespace.google_json = True
        assert args == namespace

    def test_dry_run(self, sorting_pictures, namespace):
        parser = sorting_pictures.parse_arguments()
        args = parser.parse_args('--dry-run src dest'.split())
        namespace.dry_run = True
        assert args == namespace
        # Fixed: the entire body was accidentally duplicated verbatim below
        # this point; the redundant copy has been removed.
class TestGetGoogleJsonDate:
    """get_google_json_date reads the capture time from a Google Takeout
    sidecar JSON file stored next to the image (``<name>.json``)."""

    def test_good_file(self, sorting_pictures):
        actual = sorting_pictures.get_google_json_date(Path('sample-images/a6a5e930cac831ef4e00255c51872867.jpg'))
        expected = datetime(year=2021, month=3, day=17, hour=11, minute=42, second=42)
        assert actual == expected

    def test_bad_file(self, sorting_pictures):
        # Returns None for this sample even though not_image_name.jpg.json
        # exists in the tree -- presumably its content is unusable; TODO confirm.
        actual = sorting_pictures.get_google_json_date(Path('sample-images/not_image_name.jpg'))
        assert actual is None
class TestGetDateFromFile:
    """get_date_from_filename parses timestamps out of camera-style names
    (IMG_/VID_/Screenshot_ patterns), ignoring de-duplication suffixes."""

    def test_get_datetime(self, sorting_pictures):
        actual = sorting_pictures.get_date_from_filename(Path('IMG_20171022_124203.jpg'))
        expected = datetime(year=2017, month=10, day=22, hour=12, minute=42, second=3)
        assert actual == expected

    def test_image_duplicate(self, sorting_pictures):
        # The '_01' duplicate suffix does not affect the parsed date.
        actual = sorting_pictures.get_date_from_filename(Path('IMG_20171104_104157_01.jpg'))
        expected = datetime(year=2017, month=11, day=4, hour=10, minute=41, second=57)
        assert actual == expected

    def test_screenshot(self, sorting_pictures):
        actual = sorting_pictures.get_date_from_filename(Path('Screenshot_20171007-143321.png'))
        expected = datetime(year=2017, month=10, day=7, hour=14, minute=33, second=21)
        assert actual == expected

    def test_video(self, sorting_pictures):
        actual = sorting_pictures.get_date_from_filename(Path('VID_20180724_173611.mp4'))
        expected = datetime(year=2018, month=7, day=24, hour=17, minute=36, second=11)
        assert actual == expected

    def test_failure(self, sorting_pictures):
        # A name with no recognizable timestamp yields None, not an exception.
        actual = sorting_pictures.get_date_from_filename(Path('metadata.jpg'))
        assert actual is None

    def test_image_tilde(self, sorting_pictures):
        # The '~2' edit suffix does not affect the parsed date.
        actual = sorting_pictures.get_date_from_filename(Path('IMG_20171022_124203~2.jpg'))
        expected = datetime(year=2017, month=10, day=22, hour=12, minute=42, second=3)
        assert actual == expected

    def test_image_metadata(self, sorting_pictures):
        # NOTE(review): this exercises get_date_from_exif, not
        # get_date_from_filename -- possibly misplaced in this class.
        actual = sorting_pictures.get_date_from_exif(Path('sample-images/metadata.jpg'))
        expected = datetime(year=2022, month=2, day=27, hour=12, minute=9, second=35)
        assert actual == expected
class TestIsFile:
    """is_file accepts regular files only -- not symlinks, directories,
    or device nodes."""

    def test_file(self, sorting_pictures):
        assert sorting_pictures.is_file('sample-images/metadata.jpg')

    def test_symlink(self, sorting_pictures):
        # 'no-m' and 'VID' are presumably symlinks in the sample tree -- TODO confirm.
        assert not sorting_pictures.is_file('sample-images/no-m')
        assert not sorting_pictures.is_file('sample-images/VID')

    def test_dir(self, sorting_pictures):
        assert not sorting_pictures.is_file('sample-images')

    def test_other(self, sorting_pictures):
        # /dev/null is a character device, not a regular file.
        assert not sorting_pictures.is_file('/dev/null')
class TestSearchDirectory:
    """search_directory enumerates the sample tree recursively; order is not
    guaranteed, hence the sorted() comparison."""

    def test_dir(self, sorting_pictures):
        result = sorting_pictures.search_directory('sample-images')
        assert sorted(result) == sorted(
            [
                PosixPath('sample-images/IMG_20171022_010203_01.jpg'),
                PosixPath('sample-images/IMG_20171022_124203.unknown_suffix'),
                PosixPath('sample-images/IMG_NO_PARSE.jpg'),
                PosixPath('sample-images/VID'),
                PosixPath('sample-images/a6a5e930cac831ef4e00255c51872867.jpg.json'),
                PosixPath('sample-images/metadata-copy.jpg'),
                PosixPath('sample-images/metadata.jpg'),
                PosixPath('sample-images/no-m'),
                PosixPath('sample-images/no-metadata'),
                PosixPath('sample-images/no-metadata/20170112_110943-ANIMATION.gif'),
                PosixPath('sample-images/no-metadata/20171022_010203.jpg'),
                PosixPath('sample-images/no-metadata/IMG_20171022_124203.jpg'),
                PosixPath('sample-images/no-metadata/IMG_20171022_124203_01.jpg'),
                PosixPath('sample-images/no-metadata/IMG_20171104_104157.jpg'),
                PosixPath('sample-images/no-metadata/IMG_20171104_104157_01.jpg'),
                PosixPath('sample-images/no-metadata/IMG_20171104_104158~.jpg'),
                PosixPath('sample-images/no-metadata/IMG_20181001_124203.gif'),
                PosixPath('sample-images/no-metadata/IMG~20171104~104159~.jpg'),
                PosixPath('sample-images/no-metadata/Screenshot_20171007-143321.png'),
                PosixPath('sample-images/no-metadata/VID_20180724_173611.mp4'),
                PosixPath('sample-images/no-metadata.jpg'),
                PosixPath('sample-images/not_image_name.jpg.json'),
            ]
        )
class TestDiffFiles:
    """diff_files() is True for byte-identical content, False otherwise."""

    def test_same_hash(self, sorting_pictures):
        same = sorting_pictures.diff_files('sample-images/metadata.jpg', 'sample-images/metadata-copy.jpg')
        assert same is True

    def test_different_hash(self, sorting_pictures):
        different = sorting_pictures.diff_files('sample-images/metadata.jpg', 'sample-images/no-metadata.jpg')
        assert different is False
class TestMoveFile:
    """Tests for SortingPictures.move_file: copy vs. move semantics,
    hash-based collision renaming, and refusal to handle directories or
    symlinks on either side."""

    def test_copy_file(self, sorting_pictures, tmp_path):
        """Default (copy) mode creates dest and reports success."""
        src = tmp_path / 'src'
        src.mkdir(parents=True, exist_ok=True)
        src_file = src / 'metadata.jpg'
        shutil.copy2('sample-images/metadata.jpg', src_file)
        dest_file = tmp_path / 'dest' / 'metadata-dest.jpg'
        assert not dest_file.exists()
        assert sorting_pictures.move_file(src_file, dest_file) is True
        assert dest_file.exists()

    def test_move_file(self, sorting_pictures, tmp_path):
        """move=True relocates the file: src disappears, dest appears."""
        src = tmp_path / 'src'
        src.mkdir(parents=True, exist_ok=True)
        src_file = src / 'metadata.jpg'
        shutil.copy2('sample-images/metadata.jpg', src_file)
        dest_file = tmp_path / 'dest' / 'metadata-dest.jpg'
        assert src_file.exists()
        assert not dest_file.exists()
        assert sorting_pictures.move_file(src_file, dest_file, move=True) is True
        assert not src_file.exists()
        assert dest_file.exists()

    def test_file_exists_same_hash(self, sorting_pictures, tmp_path):
        """Re-copying onto an identical existing dest still reports success."""
        src = tmp_path / 'src'
        src.mkdir(parents=True, exist_ok=True)
        src_file = src / 'metadata.jpg'
        shutil.copy2('sample-images/metadata.jpg', src_file)
        dest_file = tmp_path / 'dest' / 'metadata-dest.jpg'
        assert not dest_file.exists()
        sorting_pictures.move_file(src_file, dest_file)
        assert dest_file.exists()
        assert sorting_pictures.move_file(src_file, dest_file) is True

    def test_file_exists_different_hash(self, sorting_pictures, tmp_path):
        """A dest collision with different content lands as stem-N.suffix;
        each new distinct payload increments N, identical repeats do not."""
        src = tmp_path / 'src'
        dest = tmp_path / 'dest'
        if src.exists():
            shutil.rmtree(src)
        if dest.exists():
            shutil.rmtree(dest)
        src.mkdir(parents=True, exist_ok=True)
        src_file = src / 'metadata.jpg'
        dest_file = dest / 'metadata-dest.jpg'
        shutil.copy2('sample-images/metadata.jpg', src_file)
        assert not dest_file.exists()
        sorting_pictures.move_file(src_file, dest_file)
        assert dest_file.exists()
        # Different content: renamed copy metadata-dest-1.jpg must appear.
        shutil.copy2('sample-images/no-metadata.jpg', src_file)
        assert sorting_pictures.move_file(src_file, dest_file) is True
        assert (dest_file.parent / ('%s-%d%s' % (dest_file.stem, 1, dest_file.suffix))).exists()
        # Same content again: no new rename is produced, still success.
        assert sorting_pictures.move_file(src_file, dest_file) is True
        # Third distinct payload: becomes metadata-dest-2.jpg.
        shutil.copy2('sample-images/no-metadata/IMG_20171022_124203_01.jpg', src_file)
        assert sorting_pictures.move_file(src_file, dest_file) is True
        assert (dest_file.parent / ('%s-%d%s' % (dest_file.stem, 2, dest_file.suffix))).exists()
        assert sorting_pictures.move_file(src_file, dest_file) is True

    def test_src_file_is_dir(self, sorting_pictures, tmp_path):
        """A directory src is rejected: nothing is created at dest."""
        src = tmp_path / 'src'
        src.mkdir(parents=True, exist_ok=True)
        src_file = src / 'metadata.jpg'
        src_file.mkdir()
        dest_file = tmp_path / 'dest' / 'metadata-dest.jpg'
        assert src_file.is_dir()
        assert not dest_file.exists()
        sorting_pictures.move_file(src_file, dest_file)
        assert not dest_file.exists()

    def test_dest_file_is_dir(self, sorting_pictures, tmp_path):
        """A directory dest is rejected (falsy return)."""
        src = tmp_path / 'src'
        src.mkdir(parents=True, exist_ok=True)
        src_file = src / 'metadata.jpg'
        shutil.copy2('sample-images/metadata.jpg', src_file)
        dest = tmp_path / 'dest'
        dest.mkdir(parents=True, exist_ok=True)
        assert dest.is_dir()
        assert not sorting_pictures.move_file(src_file, dest)

    def test_src_file_is_symlink(self, sorting_pictures, tmp_path):
        """A symlink src is rejected: nothing is created at dest."""
        src = tmp_path / 'src'
        src.mkdir(parents=True, exist_ok=True)
        src_file = src / 'metadata.jpg'
        shutil.copy2('sample-images/metadata.jpg', src_file)
        # Re-point src_file at a symlink to the real file.
        src_file = src / 'metadata-symlink.jpg'
        src_file.symlink_to(src / 'metadata.jpg')
        dest_file = tmp_path / 'dest' / 'metadata-dest.jpg'
        assert src_file.is_symlink()
        assert not dest_file.exists()
        sorting_pictures.move_file(src_file, dest_file)
        assert not dest_file.exists()

    def test_dest_file_is_symlink(self, sorting_pictures, tmp_path):
        """A symlink dest is rejected (falsy return)."""
        src = tmp_path / 'src'
        src.mkdir(parents=True, exist_ok=True)
        src_file = src / 'metadata.jpg'
        shutil.copy2('sample-images/metadata.jpg', src_file)
        dest = tmp_path / 'dest'
        dest.mkdir(parents=True, exist_ok=True)
        dest = dest / 'metadata-dest.jpg'
        shutil.copy2('sample-images/metadata.jpg', dest)
        dest_symlink = tmp_path / 'dest' / 'metadata-symlink.jpg'
        dest_symlink.symlink_to(dest)
        assert dest.is_file()
        assert dest_symlink.is_symlink()
        assert not sorting_pictures.move_file(src_file, dest_symlink)
class TestSortImages:
    """End-to-end tests for SortingPictures.sort_images over the sample-images
    tree: the produced YYYY-MM destination layout, what remains in src, and
    the categorized run log (parse/suffix/exif/google_json_date/collisions)."""

    def test_successful_run_copy(self, sorting_pictures, tmp_path):
        """Copy mode: dest receives the sorted tree; src is untouched."""
        src = tmp_path / 'src'
        dest = tmp_path / 'dest'
        # Clean slate in case of fixture/tmp reuse.
        if src.exists():
            shutil.rmtree(src)
        if dest.exists():
            shutil.rmtree(dest)
        shutil.copytree('sample-images', src, symlinks=True)
        sorting_pictures.sort_images(src, dest)
        result = sorting_pictures.search_directory(dest)
        # Relativize against tmp_path so expectations are machine-independent.
        result = [p.relative_to(tmp_path) for p in result]
        assert sorted(result) == sorted([PosixPath('dest/2017-01'),
                                         PosixPath('dest/2017-01/IMG_20170112_110943.gif'),
                                         PosixPath('dest/2017-10'),
                                         PosixPath('dest/2017-10/IMG_20171007_143321.png'),
                                         PosixPath('dest/2017-10/IMG_20171022_010203.jpg'),
                                         PosixPath('dest/2017-10/IMG_20171022_124203-1.jpg'),
                                         PosixPath('dest/2017-10/IMG_20171022_124203.jpg'),
                                         PosixPath('dest/2017-11'),
                                         PosixPath('dest/2017-11/IMG_20171104_104157.jpg'),
                                         PosixPath('dest/2017-11/IMG_20171104_104158.jpg'),
                                         PosixPath('dest/2017-11/IMG_20171104_104159.jpg'),
                                         PosixPath('dest/2018-07'),
                                         PosixPath('dest/2018-07/VID_20180724_173611.mp4'),
                                         PosixPath('dest/2018-10'),
                                         PosixPath('dest/2018-10/IMG_20181001_124203.gif')])
        log = sorting_pictures.log
        # Relativize every logged path before comparing.
        log['parse'] = [p.relative_to(tmp_path) for p in log['parse']]
        log['suffix'] = [p.relative_to(tmp_path) for p in log['suffix']]
        log['collisions'] = [(p_s.relative_to(tmp_path), p_d.relative_to(tmp_path)) for (p_s, p_d) in log['collisions']]
        log['exif'] = [p.relative_to(tmp_path) for p in log['exif']]
        log['google_json_date'] = [p.relative_to(tmp_path) for p in log['google_json_date']]
        expected = {'collisions': [(PosixPath('src/IMG_20171022_010203_01.jpg'),
                                    PosixPath('dest/2017-10/IMG_20171022_010203.jpg'))],
                    'exif': [],
                    'google_json_date': [],
                    'parse': [PosixPath('src/metadata-copy.jpg'),
                              PosixPath('src/IMG_NO_PARSE.jpg'),
                              PosixPath('src/no-metadata.jpg'),
                              PosixPath('src/metadata.jpg'), ],
                    'suffix': [PosixPath('src/VID'),
                               PosixPath('src/IMG_20171022_124203.unknown_suffix'),
                               PosixPath('src/a6a5e930cac831ef4e00255c51872867.jpg.json'),
                               PosixPath('src/not_image_name.jpg.json'),
                               ]}
        # Log ordering is unspecified; compare sorted values per category.
        log = {k: sorted(v) for k, v in log.items()}
        expected = {k: sorted(v) for k, v in expected.items()}
        assert log == expected

    def test_successful_run_move(self, sorting_pictures, tmp_path):
        """Move mode: sorted files leave src; unsortable entries remain."""
        src = tmp_path / 'src'
        dest = tmp_path / 'dest'
        if src.exists():
            shutil.rmtree(src)
        if dest.exists():
            shutil.rmtree(dest)
        shutil.copytree('sample-images', src, symlinks=True)
        sorting_pictures.sort_images(src, dest, move=True)
        result = sorting_pictures.search_directory(dest)
        result = [p.relative_to(tmp_path) for p in result]
        assert sorted(result) == sorted([PosixPath('dest/2017-01'),
                                         PosixPath('dest/2017-01/IMG_20170112_110943.gif'),
                                         PosixPath('dest/2017-10'),
                                         PosixPath('dest/2017-10/IMG_20171007_143321.png'),
                                         PosixPath('dest/2017-10/IMG_20171022_010203.jpg'),
                                         PosixPath('dest/2017-10/IMG_20171022_124203-1.jpg'),
                                         PosixPath('dest/2017-10/IMG_20171022_124203.jpg'),
                                         PosixPath('dest/2017-11'),
                                         PosixPath('dest/2017-11/IMG_20171104_104157.jpg'),
                                         PosixPath('dest/2017-11/IMG_20171104_104158.jpg'),
                                         PosixPath('dest/2017-11/IMG_20171104_104159.jpg'),
                                         PosixPath('dest/2018-07'),
                                         PosixPath('dest/2018-07/VID_20180724_173611.mp4'),
                                         PosixPath('dest/2018-10'),
                                         PosixPath('dest/2018-10/IMG_20181001_124203.gif')])
        # Only the files that could not be sorted are left behind in src.
        result = sorting_pictures.search_directory(src)
        result = [p.relative_to(tmp_path) for p in result]
        assert sorted(result) == sorted([PosixPath('src/metadata-copy.jpg'),
                                         PosixPath('src/IMG_20171022_010203_01.jpg'),
                                         PosixPath('src/VID'),
                                         PosixPath('src/IMG_20171022_124203.unknown_suffix'),
                                         PosixPath('src/IMG_NO_PARSE.jpg'),
                                         PosixPath('src/no-m'),
                                         PosixPath('src/no-metadata.jpg'),
                                         PosixPath('src/metadata.jpg'),
                                         PosixPath('src/no-metadata'),
                                         PosixPath('src/a6a5e930cac831ef4e00255c51872867.jpg.json'),
                                         PosixPath('src/not_image_name.jpg.json'),
                                         ])
        log = sorting_pictures.log
        log['parse'] = [p.relative_to(tmp_path) for p in log['parse']]
        log['suffix'] = [p.relative_to(tmp_path) for p in log['suffix']]
        log['collisions'] = [(p_s.relative_to(tmp_path), p_d.relative_to(tmp_path)) for (p_s, p_d) in log['collisions']]
        log['exif'] = [p.relative_to(tmp_path) for p in log['exif']]
        log['google_json_date'] = [p.relative_to(tmp_path) for p in log['google_json_date']]
        expected = {'collisions': [(PosixPath('src/IMG_20171022_010203_01.jpg'),
                                    PosixPath('dest/2017-10/IMG_20171022_010203.jpg'))],
                    'exif': [],
                    'google_json_date': [],
                    'parse': [PosixPath('src/metadata-copy.jpg'),
                              PosixPath('src/IMG_NO_PARSE.jpg'),
                              PosixPath('src/no-metadata.jpg'),
                              PosixPath('src/metadata.jpg'), ],
                    'suffix': [PosixPath('src/VID'),
                               PosixPath('src/IMG_20171022_124203.unknown_suffix'),
                               PosixPath('src/a6a5e930cac831ef4e00255c51872867.jpg.json'),
                               PosixPath('src/not_image_name.jpg.json'),
                               ]}
        log = {k: sorted(v) for k, v in log.items()}
        expected = {k: sorted(v) for k, v in expected.items()}
        assert log == expected

    def test_successful_run_exif(self, sorting_pictures, tmp_path):
        """exif=True: files whose names do not parse fall back to EXIF dates,
        so metadata.jpg lands in dest/2022-02 and leaves src."""
        src = tmp_path / 'src'
        dest = tmp_path / 'dest'
        if src.exists():
            shutil.rmtree(src)
        if dest.exists():
            shutil.rmtree(dest)
        shutil.copytree('sample-images', src, symlinks=True)
        sorting_pictures.sort_images(src, dest, move=True, exif=True)
        result = sorting_pictures.search_directory(dest)
        result = [p.relative_to(tmp_path) for p in result]
        assert sorted(result) == sorted(
            [
                PosixPath('dest/2017-01'),
                PosixPath('dest/2017-01/IMG_20170112_110943.gif'),
                PosixPath('dest/2017-10'),
                PosixPath('dest/2017-10/IMG_20171007_143321.png'),
                PosixPath('dest/2017-10/IMG_20171022_010203.jpg'),
                PosixPath('dest/2017-10/IMG_20171022_124203-1.jpg'),
                PosixPath('dest/2017-10/IMG_20171022_124203.jpg'),
                PosixPath('dest/2017-11'),
                PosixPath('dest/2017-11/IMG_20171104_104157.jpg'),
                PosixPath('dest/2017-11/IMG_20171104_104158.jpg'),
                PosixPath('dest/2017-11/IMG_20171104_104159.jpg'),
                PosixPath('dest/2018-07'),
                PosixPath('dest/2018-07/VID_20180724_173611.mp4'),
                PosixPath('dest/2018-10'),
                PosixPath('dest/2018-10/IMG_20181001_124203.gif'),
                PosixPath('dest/2022-02'),
                PosixPath('dest/2022-02/IMG_20220227_120935.jpg'),
            ]
        )
        result = sorting_pictures.search_directory(src)
        result = [p.relative_to(tmp_path) for p in result]
        assert sorted(result) == sorted(
            [
                PosixPath('src/IMG_20171022_010203_01.jpg'),
                PosixPath('src/VID'),
                PosixPath('src/IMG_20171022_124203.unknown_suffix'),
                PosixPath('src/IMG_NO_PARSE.jpg'),
                PosixPath('src/no-m'),
                PosixPath('src/no-metadata.jpg'),
                PosixPath('src/no-metadata'),
                PosixPath('src/a6a5e930cac831ef4e00255c51872867.jpg.json'),
                PosixPath('src/not_image_name.jpg.json'),
            ])
        log = sorting_pictures.log
        log['parse'] = [p.relative_to(tmp_path) for p in log['parse']]
        log['suffix'] = [p.relative_to(tmp_path) for p in log['suffix']]
        log['collisions'] = [(p_s.relative_to(tmp_path), p_d.relative_to(tmp_path)) for (p_s, p_d) in log['collisions']]
        log['exif'] = [p.relative_to(tmp_path) for p in log['exif']]
        log['google_json_date'] = [p.relative_to(tmp_path) for p in log['google_json_date']]
        # 'exif' records files whose EXIF lookup also failed.
        expected = {'collisions': [(PosixPath('src/IMG_20171022_010203_01.jpg'),
                                    PosixPath('dest/2017-10/IMG_20171022_010203.jpg'))],
                    'exif': [PosixPath('src/IMG_NO_PARSE.jpg'),
                             PosixPath('src/no-metadata.jpg')],
                    'google_json_date': [],
                    'parse': [PosixPath('src/metadata-copy.jpg'),
                              PosixPath('src/IMG_NO_PARSE.jpg'),
                              PosixPath('src/no-metadata.jpg'),
                              PosixPath('src/metadata.jpg'), ],
                    'suffix': [PosixPath('src/VID'),
                               PosixPath('src/IMG_20171022_124203.unknown_suffix'),
                               PosixPath('src/a6a5e930cac831ef4e00255c51872867.jpg.json'),
                               PosixPath('src/not_image_name.jpg.json'),
                               ]}
        log = {k: sorted(v) for k, v in log.items()}
        expected = {k: sorted(v) for k, v in expected.items()}
        assert log == expected

    def test_successful_run_google_json_date(self, sorting_pictures, tmp_path):
        """google_json_date=True: sidecar JSON lookup is attempted for parse
        failures; here every lookup fails, so those files stay in src."""
        src = tmp_path / 'src'
        dest = tmp_path / 'dest'
        if src.exists():
            shutil.rmtree(src)
        if dest.exists():
            shutil.rmtree(dest)
        shutil.copytree('sample-images', src, symlinks=True)
        sorting_pictures.sort_images(src, dest, move=True, google_json_date=True)
        result = sorting_pictures.search_directory(dest)
        result = [p.relative_to(tmp_path) for p in result]
        assert sorted(result) == sorted(
            [
                PosixPath('dest/2017-01'),
                PosixPath('dest/2017-01/IMG_20170112_110943.gif'),
                PosixPath('dest/2017-10'),
                PosixPath('dest/2017-10/IMG_20171007_143321.png'),
                PosixPath('dest/2017-10/IMG_20171022_010203.jpg'),
                PosixPath('dest/2017-10/IMG_20171022_124203-1.jpg'),
                PosixPath('dest/2017-10/IMG_20171022_124203.jpg'),
                PosixPath('dest/2017-11'),
                PosixPath('dest/2017-11/IMG_20171104_104157.jpg'),
                PosixPath('dest/2017-11/IMG_20171104_104158.jpg'),
                PosixPath('dest/2017-11/IMG_20171104_104159.jpg'),
                PosixPath('dest/2018-07'),
                PosixPath('dest/2018-07/VID_20180724_173611.mp4'),
                PosixPath('dest/2018-10'),
                PosixPath('dest/2018-10/IMG_20181001_124203.gif'),
            ]
        )
        result = sorting_pictures.search_directory(src)
        result = [p.relative_to(tmp_path) for p in result]
        assert sorted(result) == sorted(
            [
                PosixPath('src/IMG_20171022_010203_01.jpg'),
                PosixPath('src/VID'),
                PosixPath('src/IMG_20171022_124203.unknown_suffix'),
                PosixPath('src/IMG_NO_PARSE.jpg'),
                PosixPath('src/no-m'),
                PosixPath('src/no-metadata.jpg'),
                PosixPath('src/no-metadata'),
                PosixPath('src/a6a5e930cac831ef4e00255c51872867.jpg.json'),
                PosixPath('src/metadata-copy.jpg'),
                PosixPath('src/metadata.jpg'),
                PosixPath('src/not_image_name.jpg.json'),
            ])
        log = sorting_pictures.log
        log['parse'] = [p.relative_to(tmp_path) for p in log['parse']]
        log['suffix'] = [p.relative_to(tmp_path) for p in log['suffix']]
        log['collisions'] = [(p_s.relative_to(tmp_path), p_d.relative_to(tmp_path)) for (p_s, p_d) in log['collisions']]
        log['exif'] = [p.relative_to(tmp_path) for p in log['exif']]
        log['google_json_date'] = [p.relative_to(tmp_path) for p in log['google_json_date']]
        # 'google_json_date' records files whose sidecar lookup failed too.
        expected = {'collisions': [(PosixPath('src/IMG_20171022_010203_01.jpg'),
                                    PosixPath('dest/2017-10/IMG_20171022_010203.jpg'))],
                    'exif': [],
                    'google_json_date': [PosixPath('src/IMG_NO_PARSE.jpg'),
                                         PosixPath('src/metadata-copy.jpg'),
                                         PosixPath('src/metadata.jpg'),
                                         PosixPath('src/no-metadata.jpg')],
                    'parse': [PosixPath('src/metadata-copy.jpg'),
                              PosixPath('src/IMG_NO_PARSE.jpg'),
                              PosixPath('src/no-metadata.jpg'),
                              PosixPath('src/metadata.jpg'), ],
                    'suffix': [PosixPath('src/VID'),
                               PosixPath('src/IMG_20171022_124203.unknown_suffix'),
                               PosixPath('src/a6a5e930cac831ef4e00255c51872867.jpg.json'),
                               PosixPath('src/not_image_name.jpg.json'),
                               ]}
        log = {k: sorted(v) for k, v in log.items()}
        expected = {k: sorted(v) for k, v in expected.items()}
        assert log == expected

    def test_unknown_suffix(self, sorting_pictures, tmp_path):
        """A parseable name with an unknown extension is skipped and logged."""
        src = tmp_path / 'src'
        dest = tmp_path / 'dest'
        if src.exists():
            shutil.rmtree(src)
        if dest.exists():
            shutil.rmtree(dest)
        src.mkdir(parents=True, exist_ok=True)
        bad_suffix = src / 'IMG_20170102_030405.UNKNOWN_FOOBAR'
        bad_suffix.touch()
        sorting_pictures.sort_images(src, dest)
        result = sorting_pictures.search_directory(dest)
        assert result == list()
        log = sorting_pictures.log
        log['suffix'] = [p.relative_to(tmp_path) for p in log['suffix']]
        assert log == {
            'parse': [],
            'exif': [],
            'google_json_date': [],
            'suffix': [PosixPath('src/IMG_20170102_030405.UNKNOWN_FOOBAR')],
            'collisions': []}

    def test_run_copy_dry_run(self, sorting_pictures, tmp_path):
        """dry_run in copy mode: nothing is written, but the log is filled
        exactly as a real run would fill it."""
        src = tmp_path / 'src'
        dest = tmp_path / 'dest'
        if src.exists():
            shutil.rmtree(src)
        if dest.exists():
            shutil.rmtree(dest)
        shutil.copytree('sample-images', src, symlinks=True)
        sorting_pictures.sort_images(src, dest, dry_run=True)
        result = sorting_pictures.search_directory(dest)
        result = [p.relative_to(tmp_path) for p in result]
        assert result == []
        log = sorting_pictures.log
        log['parse'] = [p.relative_to(tmp_path) for p in log['parse']]
        log['suffix'] = [p.relative_to(tmp_path) for p in log['suffix']]
        log['collisions'] = [(p_s.relative_to(tmp_path), p_d.relative_to(tmp_path)) for (p_s, p_d) in log['collisions']]
        log['exif'] = [p.relative_to(tmp_path) for p in log['exif']]
        log['google_json_date'] = [p.relative_to(tmp_path) for p in log['google_json_date']]
        expected = {'collisions': [(PosixPath('src/IMG_20171022_010203_01.jpg'),
                                    PosixPath('dest/2017-10/IMG_20171022_010203.jpg'))],
                    'exif': [],
                    'google_json_date': [],
                    'parse': [PosixPath('src/metadata-copy.jpg'),
                              PosixPath('src/IMG_NO_PARSE.jpg'),
                              PosixPath('src/no-metadata.jpg'),
                              PosixPath('src/metadata.jpg'), ],
                    'suffix': [PosixPath('src/VID'),
                               PosixPath('src/IMG_20171022_124203.unknown_suffix'),
                               PosixPath('src/a6a5e930cac831ef4e00255c51872867.jpg.json'),
                               PosixPath('src/not_image_name.jpg.json'),
                               ]}
        log = {k: sorted(v) for k, v in log.items()}
        expected = {k: sorted(v) for k, v in expected.items()}
        assert log == expected

    def test_run_move_dry_run(self, sorting_pictures, tmp_path):
        """dry_run in move mode: nothing is written or removed; log matches."""
        src = tmp_path / 'src'
        dest = tmp_path / 'dest'
        if src.exists():
            shutil.rmtree(src)
        if dest.exists():
            shutil.rmtree(dest)
        shutil.copytree('sample-images', src, symlinks=True)
        sorting_pictures.sort_images(src, dest, move=True, dry_run=True)
        result = sorting_pictures.search_directory(dest)
        result = [p.relative_to(tmp_path) for p in result]
        assert result == []
        log = sorting_pictures.log
        log['parse'] = [p.relative_to(tmp_path) for p in log['parse']]
        log['suffix'] = [p.relative_to(tmp_path) for p in log['suffix']]
        log['collisions'] = [(p_s.relative_to(tmp_path), p_d.relative_to(tmp_path)) for (p_s, p_d) in log['collisions']]
        log['exif'] = [p.relative_to(tmp_path) for p in log['exif']]
        log['google_json_date'] = [p.relative_to(tmp_path) for p in log['google_json_date']]
        expected = {'collisions': [(PosixPath('src/IMG_20171022_010203_01.jpg'),
                                    PosixPath('dest/2017-10/IMG_20171022_010203.jpg'))],
                    'exif': [],
                    'google_json_date': [],
                    'parse': [PosixPath('src/metadata-copy.jpg'),
                              PosixPath('src/IMG_NO_PARSE.jpg'),
                              PosixPath('src/no-metadata.jpg'),
                              PosixPath('src/metadata.jpg'), ],
                    'suffix': [PosixPath('src/VID'),
                               PosixPath('src/IMG_20171022_124203.unknown_suffix'),
                               PosixPath('src/a6a5e930cac831ef4e00255c51872867.jpg.json'),
                               PosixPath('src/not_image_name.jpg.json'),
                               ]}
        log = {k: sorted(v) for k, v in log.items()}
        expected = {k: sorted(v) for k, v in expected.items()}
        assert log == expected
class TestMain:
    """Tests for SortingPictures.main(): CLI argument handling, fan-out of
    sort_images calls, error exits, and the log-reporting flags.

    parse_arguments is patched so each test injects its own Namespace; note
    @patch decorators apply bottom-up, so the mock closest to the def comes
    first in the parameter list."""

    @patch('sort.SortingPictures.sort_images')
    @patch('sort.SortingPictures.parse_arguments')
    def test_basic(self, mock_parser, mock_sort_images, sorting_pictures, namespace):
        """Default namespace: one copy-mode sort_images(src, dest) call."""
        mock_parser.return_value.parse_args.return_value = namespace
        sorting_pictures.main()
        mock_sort_images.assert_called_with(PosixPath('src'), PosixPath('dest'), move=False,
                                            exif=False, google_json_date=False, dry_run=False)

    @patch('sort.SortingPictures.sort_images')
    @patch('sort.SortingPictures.parse_arguments')
    def test_basic_exif_true(self, mock_parser, mock_sort_images, sorting_pictures, namespace):
        """--exif is forwarded to sort_images."""
        namespace.exif = True
        mock_parser.return_value.parse_args.return_value = namespace
        sorting_pictures.main()
        mock_sort_images.assert_called_with(PosixPath('src'), PosixPath('dest'), move=False,
                                            exif=True, google_json_date=False, dry_run=False)

    @patch('sort.SortingPictures.sort_images')
    @patch('sort.SortingPictures.parse_arguments')
    def test_basic_google_json_true(self, mock_parser, mock_sort_images, sorting_pictures, namespace):
        """--google-json is forwarded as google_json_date."""
        namespace.google_json = True
        mock_parser.return_value.parse_args.return_value = namespace
        sorting_pictures.main()
        mock_sort_images.assert_called_with(PosixPath('src'), PosixPath('dest'), move=False,
                                            exif=False, google_json_date=True, dry_run=False)

    @patch('sys.exit')
    @patch('sort.SortingPictures.parse_arguments')
    def test_basic_exif_and_google_json_date(self, mock_parser, mock_exit, sorting_pictures, namespace):
        """--exif and --google-json together are mutually exclusive: exit(1)."""
        namespace.exif = True
        namespace.google_json = True
        mock_parser.return_value.parse_args.return_value = namespace
        sorting_pictures.main()
        mock_exit.assert_called_once_with(1)

    @patch('sort.SortingPictures.sort_images')
    @patch('sort.SortingPictures.parse_arguments')
    def test_basic_dry_run(self, mock_parser, mock_sort_images, sorting_pictures, namespace):
        """--dry-run is forwarded to sort_images."""
        namespace.dry_run = True
        mock_parser.return_value.parse_args.return_value = namespace
        sorting_pictures.main()
        mock_sort_images.assert_called_with(PosixPath('src'), PosixPath('dest'), move=False,
                                            exif=False, google_json_date=False, dry_run=True)

    @patch('sort.SortingPictures.sort_images')
    @patch('sort.SortingPictures.parse_arguments')
    def test_basic_move(self, mock_parser, mock_sort_images, sorting_pictures, namespace):
        """--move is forwarded to sort_images."""
        namespace.move = True
        mock_parser.return_value.parse_args.return_value = namespace
        sorting_pictures.main()
        mock_sort_images.assert_called_with(PosixPath('src'), PosixPath('dest'), move=True,
                                            exif=False, google_json_date=False, dry_run=False)

    @patch('sort.SortingPictures.sort_images')
    @patch('sort.SortingPictures.parse_arguments')
    def test_multi_src(self, mock_parser, mock_sort_images, sorting_pictures, namespace):
        """Several sources: sort_images is called once per src, last path is dest."""
        namespace.paths = 'src0 src1 src2 dest'.split()
        mock_parser.return_value.parse_args.return_value = namespace
        sorting_pictures.main()
        assert mock_sort_images.call_args_list == [
            call(PosixPath('src0'), PosixPath('dest'), move=False, exif=False, google_json_date=False, dry_run=False),
            call(PosixPath('src1'), PosixPath('dest'), move=False, exif=False, google_json_date=False, dry_run=False),
            call(PosixPath('src2'), PosixPath('dest'), move=False, exif=False, google_json_date=False, dry_run=False)]

    @patch('sort.SortingPictures.sort_images')
    @patch('sort.SortingPictures.parse_arguments')
    def test_multi_src_move(self, mock_parser, mock_sort_images, sorting_pictures, namespace):
        """Several sources with --move: move=True on every call."""
        namespace.move = True
        namespace.paths = 'src0 src1 src2 dest'.split()
        mock_parser.return_value.parse_args.return_value = namespace
        sorting_pictures.main()
        assert mock_sort_images.call_args_list == [
            call(PosixPath('src0'), PosixPath('dest'), move=True, exif=False, google_json_date=False, dry_run=False),
            call(PosixPath('src1'), PosixPath('dest'), move=True, exif=False, google_json_date=False, dry_run=False),
            call(PosixPath('src2'), PosixPath('dest'), move=True, exif=False, google_json_date=False, dry_run=False),
        ]

    @patch('sys.exit')
    @patch('sort.SortingPictures.parse_arguments')
    def test_bad_order(self, mock_parser, mock_exit, sorting_pictures, namespace):
        """An option mixed into the positional paths triggers exit(1)."""
        namespace.paths = 'src0 src1 src2 dest --collisions'.split()
        mock_parser.return_value.parse_args.return_value = namespace
        sorting_pictures.main()
        mock_exit.assert_called_once_with(1)
        # Same failure when the stray option appears mid-list.
        mock_exit.reset_mock()
        namespace.paths = 'src0 src1 --collisions src2 dest'.split()
        mock_parser.return_value.parse_args.return_value = namespace
        sorting_pictures.main()
        mock_exit.assert_called_once_with(1)

    @patch('sys.exit')
    @patch('sort.SortingPictures.sort_images')
    @patch('sort.SortingPictures.parse_arguments')
    def test_too_few_args(self, mock_parser, mock_sort_images, mock_exit, sorting_pictures, namespace):
        """Fewer than two paths: no sorting happens, exit(1)."""
        namespace.paths = 'src'.split()
        mock_parser.return_value.parse_args.return_value = namespace
        sorting_pictures.main()
        mock_sort_images.assert_not_called()
        mock_exit.assert_called_once_with(1)

    @patch('builtins.print')
    @patch('sort.SortingPictures.sort_images')
    @patch('sort.SortingPictures.parse_arguments')
    def test_collisions_true(self, mock_parser, mock_sorting_pictures, mock_print, sorting_pictures, namespace):
        """--collisions prints only the collisions section of the log."""
        namespace.collisions = True
        mock_parser.return_value.parse_args.return_value = namespace
        sorting_pictures.log['collisions'] = [('a', 'b')]
        sorting_pictures.log['suffix'] = ['a.UNKNOWN']
        sorting_pictures.log['parse'] = ['metadata.jpg']
        sorting_pictures.main()
        assert mock_print.mock_calls == [call('collisions', 'a', 'b')]

    @patch('builtins.print')
    @patch('sort.SortingPictures.sort_images')
    @patch('sort.SortingPictures.parse_arguments')
    def test_suffix_true(self, mock_parser, mock_sorting_pictures, mock_print, sorting_pictures, namespace):
        """--suffix prints only the suffix section of the log."""
        namespace.suffix = True
        mock_parser.return_value.parse_args.return_value = namespace
        sorting_pictures.log['collisions'] = [('a', 'b')]
        sorting_pictures.log['suffix'] = ['a.UNKNOWN']
        sorting_pictures.log['parse'] = ['metadata.jpg']
        sorting_pictures.main()
        assert mock_print.mock_calls == [call('suffix', 'a.UNKNOWN')]

    @patch('builtins.print')
    @patch('sort.SortingPictures.sort_images')
    @patch('sort.SortingPictures.parse_arguments')
    def test_parse_true(self, mock_parser, mock_sorting_pictures, mock_print, sorting_pictures, namespace):
        """--parse prints only the parse section of the log."""
        namespace.parse = True
        mock_parser.return_value.parse_args.return_value = namespace
        sorting_pictures.log['collisions'] = [('a', 'b')]
        sorting_pictures.log['suffix'] = ['a.UNKNOWN']
        sorting_pictures.log['parse'] = ['metadata.jpg']
        sorting_pictures.main()
        assert mock_print.mock_calls == [call('parse', 'metadata.jpg')]

    @patch('builtins.print')
    @patch('sort.SortingPictures.sort_images')
    @patch('sort.SortingPictures.parse_arguments')
    def test_all_output_options(self, mock_parser, mock_sorting_pictures, mock_print, sorting_pictures, namespace):
        """All report flags together print each section once, in order."""
        namespace.collisions = True
        namespace.suffix = True
        namespace.parse = True
        mock_parser.return_value.parse_args.return_value = namespace
        sorting_pictures.log['collisions'] = [('a', 'b')]
        sorting_pictures.log['suffix'] = ['a.UNKNOWN']
        sorting_pictures.log['parse'] = ['metadata.jpg']
        sorting_pictures.main()
        assert mock_print.mock_calls == [call('collisions', 'a', 'b'),
                                         call('suffix', 'a.UNKNOWN'),
                                         call('parse', 'metadata.jpg')]
| 45.644693
| 120
| 0.60024
| 4,735
| 40,852
| 4.941711
| 0.044139
| 0.094876
| 0.036326
| 0.03342
| 0.905039
| 0.874952
| 0.843455
| 0.83666
| 0.831916
| 0.802299
| 0
| 0.068918
| 0.279325
| 40,852
| 894
| 121
| 45.695749
| 0.725859
| 0.000441
| 0
| 0.722455
| 0
| 0
| 0.213652
| 0.138336
| 0
| 0
| 0
| 0
| 0.124128
| 1
| 0.076709
| false
| 0
| 0.009763
| 0.002789
| 0.101813
| 0.016736
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6e710270fe6fbd8b5352fb19cfcf491ba400103d
| 11,681
|
py
|
Python
|
models/nn.py
|
AlejandroCatalina/lstm_wind_energy
|
6107502fb478040c5942b4ac5bc03c86b0cd1776
|
[
"MIT"
] | 1
|
2020-08-16T06:45:54.000Z
|
2020-08-16T06:45:54.000Z
|
models/nn.py
|
AlejandroCatalina/lstm_wind_energy
|
6107502fb478040c5942b4ac5bc03c86b0cd1776
|
[
"MIT"
] | null | null | null |
models/nn.py
|
AlejandroCatalina/lstm_wind_energy
|
6107502fb478040c5942b4ac5bc03c86b0cd1776
|
[
"MIT"
] | null | null | null |
import numpy as np
import pyro
import pyro.distributions as dist
import pyro.infer
import pyro.optim
import torch
import torch.nn as nn
from torch.distributions import LogNormal, Normal
class LSTM(nn.Module):
    """Plain LSTM regressor: a stacked LSTM followed by a linear readout.

    The input is reshaped to (seq_len, batch_size, input_dim) and only the
    final timestep's hidden output feeds the linear layer.
    """

    def __init__(self,
                 input_dim,
                 hidden_dim,
                 batch_size,
                 output_dim=1,
                 dropout=0.0,
                 num_layers=2):
        super().__init__()
        self.input_dim = input_dim
        self.hidden_dim = hidden_dim
        self.batch_size = batch_size
        self.num_layers = num_layers
        # Recurrent core; dropout applies between stacked layers.
        self.lstm = nn.LSTM(self.input_dim, self.hidden_dim,
                            self.num_layers, dropout=dropout)
        # Linear readout from the final hidden state.
        self.linear = nn.Linear(self.hidden_dim, output_dim)

    def init_hidden(self):
        """Return zeroed (h0, c0) states shaped (num_layers, batch, hidden)."""
        state_shape = (self.num_layers, self.batch_size, self.hidden_dim)
        return (torch.zeros(*state_shape), torch.zeros(*state_shape))

    def forward(self, input):
        """Run the sequence through the LSTM and predict from the last step."""
        sequence = input.view(-1, self.batch_size, self.input_dim)
        # out: (seq_len, batch, hidden); hidden: (h_n, c_n), each
        # (num_layers, batch, hidden).
        out, self.hidden = self.lstm(sequence)
        # Keep only the final timestep (seq2seq would use all of `out`).
        last_step = out[-1].view(self.batch_size, -1)
        return self.linear(last_step).view(-1)
class CLSTM(nn.Module):
def __init__(self,
input_dim,
hidden_dim,
batch_size,
output_dim=1,
num_lstm_layers=1,
num_conv_layers=1,
num_dense_layers=0,
dropout_conv=0.2,
dropout_lstm=0.2,
kernel_size=3,
padding=1,
pool_kernel_size=2,
input_channels=8,
height=15,
width=8):
super(CLSTM, self).__init__()
self.height = height
self.width = width
self.input_channels = input_channels
self.input_dim = input_dim
self.hidden_dim = hidden_dim
self.batch_size = batch_size
self.dropout_conv = dropout_conv
self.dropout_lstm = dropout_lstm
self.num_lstm_layers = num_lstm_layers
self.num_conv_layers = num_conv_layers
self.num_dense_layers = num_dense_layers
self.padding = padding
self.kernel_size = kernel_size
self.pool_kernel_size = pool_kernel_size
total_layers = self.num_lstm_layers + self.num_dense_layers + self.num_conv_layers
if isinstance(self.hidden_dim, list):
if len(self.hidden_dim) != total_layers:
pad = nn.ConstantPad1d((0, total_layers), self.hidden_dim[-1])
self.hidden_dim = pad(torch.IntTensor(self.hidden_dim))
self.hidden_dim = self.hidden_dim.numpy().tolist()
else:
self.hidden_dim = [self.hidden_dim] * total_layers
# compute out dimension and define layers
out_11, out_22 = self.height, self.width
# first input_channels is the number of channels in the data
input_channels = self.input_channels
self.convs = []
for output_channels in self.hidden_dim[:num_conv_layers]:
# define the conv layers
# output is (num_train, hidden_dim, out_1, out_2)
self.convs.append(
nn.Conv2d(
input_channels,
output_channels,
self.kernel_size,
padding=self.padding).cuda())
# input channels for text conv layers is the output channels
# of the previous one
input_channels = output_channels
out_1 = int(
np.floor((out_11 + 2 * self.padding - 1 *
(self.kernel_size - 1) - 1) / 1 + 1))
out_2 = int(
np.floor((out_22 + 2 * self.padding - 1 *
(self.kernel_size - 1) - 1) / 1 + 1))
# out_11 = int(
# np.floor((out_1 - 1 *
# (self.pool_kernel_size - 1) - 1) / self.pool_kernel_size + 1))
# out_22 = int(
# np.floor((out_2 - 1 *
# (self.pool_kernel_size - 1) - 1) / self.pool_kernel_size + 1))
out_11, out_22 = out_1, out_2
self.relu = nn.ReLU()
# self.max_pool = nn.MaxPool2d(kernel_size=pool_kernel_size)
# output is (num_train, hidden_dim, out_11, out_22)
self.dropout = nn.Dropout(p=self.dropout_conv)
self.dim = output_channels * out_1 * out_2
# self.dim = output_channels * out_11 * out_22
# define dense layers before lstm
self.dense = []
dim = self.dim
for dense_dim in self.hidden_dim[self.num_conv_layers:
self.num_dense_layers]:
self.dense.append(nn.Linear(dim, dense_dim).cuda())
dim = dense_dim
# Define the LSTM layer
# hidden_dim of the LSTM is shared among all LSTM layers
# so there must be only 1 hidden_dim attached to the LSTMs
self.lstm_dim = dim
self.lstm_hidden_dim = self.hidden_dim[-1]
self.lstm = nn.LSTM(
dim,
self.lstm_hidden_dim,
self.num_lstm_layers,
dropout=self.dropout_lstm)
# Define the output layer
self.linear = nn.Linear(self.lstm_hidden_dim, output_dim)
def init_hidden(self):
    """Return a fresh zero-initialized (h_0, c_0) state pair for the LSTM.

    Both tensors have shape (num_lstm_layers, batch_size, lstm_hidden_dim).
    """
    state_shape = (self.num_lstm_layers, self.batch_size,
                   self.lstm_hidden_dim)
    return torch.zeros(*state_shape), torch.zeros(*state_shape)
def forward(self, input):
    """Run the conv stack, dense stack, LSTM, and output projection.

    The flat input is reshaped to (batch, height, width, channels) and
    reordered to NCHW for the conv layers.  The last LSTM timestep is
    projected through the output layer; the final hidden state is kept
    on ``self.hidden`` as a side effect.  Returns a flattened 1-D tensor.
    """
    # Reshape flat input to NHWC, then reorder to NCHW for Conv2d.
    x = input.view(self.batch_size, self.height, self.width,
                   self.input_channels).permute([0, 3, 1, 2])
    # Conv blocks: conv -> ReLU -> dropout, applied in sequence.
    for conv_layer in self.convs:
        x = self.dropout(self.relu(conv_layer(x)))
    # Dense blocks: flatten per sample, then linear -> dropout.
    for dense_layer in self.dense:
        x = self.dropout(dense_layer(x.view(self.batch_size, -1)))
    # LSTM input must be (seq_len, batch_size, feature_dim).
    # lstm_out is (seq_len, batch_size, hidden_dim); self.hidden is the
    # (h_n, c_n) pair, each (num_lstm_layers, batch_size, hidden_dim).
    seq = x.view(-1, self.batch_size, self.lstm_dim)
    lstm_out, self.hidden = self.lstm(seq)
    # Only the final timestep feeds the output layer; pass the whole
    # lstm_out instead for a seq2seq-style prediction.
    final_step = lstm_out[-1].view(self.batch_size, -1)
    return self.linear(final_step).view(-1)
class CNN(nn.Module):
def __init__(self,
input_dim,
hidden_dim,
batch_size,
output_dim=1,
num_conv_layers=1,
num_dense_layers=1,
dropout_conv=0.2,
kernel_size=3,
padding=1,
pool_kernel_size=2,
input_channels=8,
height=15,
width=8):
super(CNN, self).__init__()
self.height = height
self.width = width
self.input_channels = input_channels
self.input_dim = input_dim
self.hidden_dim = hidden_dim
self.batch_size = batch_size
self.num_conv_layers = num_conv_layers
self.dropout_conv = dropout_conv
self.padding = padding
self.kernel_size = kernel_size
self.pool_kernel_size = pool_kernel_size
self.num_dense_layers = num_dense_layers
total_layers = self.num_conv_layers + self.num_dense_layers
if isinstance(self.hidden_dim, list):
if len(self.hidden_dim) != total_layers:
pad = nn.ConstantPad1d((0, total_layers), self.hidden_dim[-1])
self.hidden_dim = pad(torch.IntTensor(self.hidden_dim))
self.hidden_dim = self.hidden_dim.numpy().tolist()
else:
self.hidden_dim = [self.hidden_dim] * total_layers
# compute out dimension and define layers
out_11, out_22 = self.height, self.width
self.convs = []
# first input_channels is the number of channels in the data
input_channels = self.input_channels
for output_channels in self.hidden_dim[:num_conv_layers]:
# define the conv layers
# output is (num_train, hidden_dim, out_1, out_2)
self.convs.append(
nn.Conv2d(
input_channels,
output_channels,
self.kernel_size,
padding=self.padding).cuda())
# input channels for text conv layers is the output channels
# of the previous one
input_channels = output_channels
out_1 = int(
np.floor((out_11 + 2 * self.padding - 1 *
(self.kernel_size - 1) - 1) / 1 + 1))
out_2 = int(
np.floor((out_22 + 2 * self.padding - 1 *
(self.kernel_size - 1) - 1) / 1 + 1))
# out_11 = int(
# np.floor((out_1 - 1 *
# (self.pool_kernel_size - 1) - 1) / self.pool_kernel_size + 1))
# out_22 = int(
# np.floor((out_2 - 1 *
# (self.pool_kernel_size - 1) - 1) / self.pool_kernel_size + 1))
out_11, out_22 = out_1, out_2
self.relu = nn.ReLU()
# self.max_pool = nn.MaxPool2d(kernel_size=pool_kernel_size)
# output is (num_train, hidden_dim, out_11, out_22)
self.dropout = nn.Dropout(p=self.dropout_conv)
self.dim = output_channels * out_1 * out_2
# self.dim = output_channels * out_11 * out_22
# define dense layers
self.dense = []
dim = self.dim
for hidden_dim in self.hidden_dim[num_conv_layers:]:
self.dense.append(nn.Linear(dim, hidden_dim).cuda())
dim = hidden_dim
self.dense.append(nn.Linear(dim, output_dim).cuda())
# Define the output layer
# self.linear = nn.Linear(self.dim, output_dim)
def forward(self, input):
input_view = input.view(self.batch_size, self.height, self.width,
self.input_channels)
input_tr = input_view.permute([0, 3, 1, 2])
# sequentially apply conv layers
for conv in self.convs:
# input_tr = self.dropout(self.relu(self.max_pool(conv(input_tr))))
input_tr = self.dropout(self.relu(conv(input_tr)))
conv_out = input_tr
input_dense = conv_out
for linear in self.dense:
input_dense = self.dropout(
linear(input_dense.view(self.batch_size, -1)))
y_pred = input_dense
return y_pred.view(-1)
| 38.298361
| 94
| 0.568016
| 1,506
| 11,681
| 4.146746
| 0.088977
| 0.077822
| 0.06245
| 0.028183
| 0.877022
| 0.855244
| 0.804644
| 0.782226
| 0.742354
| 0.722178
| 0
| 0.022481
| 0.345005
| 11,681
| 304
| 95
| 38.424342
| 0.793752
| 0.217105
| 0
| 0.748768
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039409
| false
| 0
| 0.039409
| 0.009852
| 0.118227
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6ea0672855b4e30adc8a5e2e8c518de05c0e64ef
| 68,685
|
py
|
Python
|
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_ml/SystemIPC/cmp_GemsFDTD/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_ml/SystemIPC/cmp_GemsFDTD/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_ml/SystemIPC/cmp_GemsFDTD/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0699222,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.257609,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.391941,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.187215,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.324188,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.185931,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.697333,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.124965,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.79997,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.074046,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00678667,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0746711,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0501916,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.148717,
'Execution Unit/Register Files/Runtime Dynamic': 0.0569783,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.199592,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.544512,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 1.96181,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000129392,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000129392,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000111995,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 4.29688e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000721006,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00109179,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00126583,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0482505,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.06915,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.117623,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.16388,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.43889,
'Instruction Fetch Unit/Runtime Dynamic': 0.332111,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.127126,
'L2/Runtime Dynamic': 0.0372235,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.29389,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.04022,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0665413,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0665412,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.60939,
'Load Store Unit/Runtime Dynamic': 1.43492,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.164079,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.328158,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0582323,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0601337,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.190828,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0193061,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.449999,
'Memory Management Unit/Runtime Dynamic': 0.0794398,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 19.9871,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.258329,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0126817,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0939102,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.364921,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 4.21042,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0263559,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.22339,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.139668,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.0546275,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.0881121,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.044476,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.187216,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.0410637,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.15066,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0263863,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00229132,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0265443,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0169457,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0529306,
'Execution Unit/Register Files/Runtime Dynamic': 0.019237,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0625118,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.164479,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 0.999699,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 4.05644e-05,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 4.05644e-05,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 3.53757e-05,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 1.37187e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000243427,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.000359931,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00038735,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0162904,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 1.03621,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0381479,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.0553294,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 3.30501,
'Instruction Fetch Unit/Runtime Dynamic': 0.110515,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0425415,
'L2/Runtime Dynamic': 0.0128499,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 1.92486,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.348049,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0222502,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0222501,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.02993,
'Load Store Unit/Runtime Dynamic': 0.480029,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0548651,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.10973,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0194718,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0201081,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.0644275,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.00626143,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.253986,
'Memory Management Unit/Runtime Dynamic': 0.0263695,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 13.3716,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0694104,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00330935,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0268057,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.0995254,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 1.72899,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0204913,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.218784,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.117108,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.0462527,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.0746039,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0376575,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.158514,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.0349451,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.0937,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0221242,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00194005,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0214383,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0143478,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0435625,
'Execution Unit/Register Files/Runtime Dynamic': 0.0162879,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0502884,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.137914,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 0.936877,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 3.56099e-05,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 3.56099e-05,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 3.0896e-05,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 1.18947e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000206108,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.000308224,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.000345715,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0137929,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 0.877351,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0330863,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.046847,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 3.13845,
'Instruction Fetch Unit/Runtime Dynamic': 0.0943802,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0374064,
'L2/Runtime Dynamic': 0.0108138,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 1.81246,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.291083,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0186135,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0186136,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 1.90035,
'Load Store Unit/Runtime Dynamic': 0.401492,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0458978,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.0917959,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0162893,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0168493,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.0545505,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.00542955,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.238641,
'Memory Management Unit/Runtime Dynamic': 0.0222788,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 12.998,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0581992,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00279507,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0227126,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.0837069,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 1.54955,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0167229,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.215823,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.107395,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.0455796,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.0735182,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0371095,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.156207,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.0356643,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.0719,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0202892,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00191181,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0193908,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.014139,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.03968,
'Execution Unit/Register Files/Runtime Dynamic': 0.0160508,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0450326,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.1325,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 0.925959,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 4.09019e-05,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 4.09019e-05,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 3.56535e-05,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 1.38173e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000203108,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.000320566,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.000391166,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0135922,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 0.864581,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0342178,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.0461652,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 3.12506,
'Instruction Fetch Unit/Runtime Dynamic': 0.094687,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.038241,
'L2/Runtime Dynamic': 0.0106277,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 1.80898,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.289274,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.018501,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0185009,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 1.89634,
'Load Store Unit/Runtime Dynamic': 0.399015,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0456202,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.0912399,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0161908,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0167631,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.0537565,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.00561521,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.237678,
'Memory Management Unit/Runtime Dynamic': 0.0223783,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 12.9587,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0533712,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00270594,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0225161,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.0785933,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 1.53126,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 8.23870643681251,
'Runtime Dynamic': 8.23870643681251,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.434847,
'Runtime Dynamic': 0.170736,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 59.7502,
'Peak Power': 92.8625,
'Runtime Dynamic': 9.19096,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 59.3154,
'Total Cores/Runtime Dynamic': 9.02022,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.434847,
'Total L3s/Runtime Dynamic': 0.170736,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 75.147702
| 124
| 0.682245
| 8,095
| 68,685
| 5.782829
| 0.067696
| 0.123387
| 0.112792
| 0.093309
| 0.937751
| 0.92989
| 0.917521
| 0.886311
| 0.862108
| 0.842519
| 0
| 0.132698
| 0.224081
| 68,685
| 914
| 125
| 75.147702
| 0.745675
| 0
| 0
| 0.642232
| 0
| 0
| 0.656684
| 0.048045
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6ec9b1c071ae2ec2294afd58051789021b6fded2
| 23,810
|
py
|
Python
|
user_service_sdk/api/user_admin/user_admin_client.py
|
easyopsapis/easyops-api-python
|
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
|
[
"Apache-2.0"
] | 5
|
2019-07-31T04:11:05.000Z
|
2021-01-07T03:23:20.000Z
|
user_service_sdk/api/user_admin/user_admin_client.py
|
easyopsapis/easyops-api-python
|
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
|
[
"Apache-2.0"
] | null | null | null |
user_service_sdk/api/user_admin/user_admin_client.py
|
easyopsapis/easyops-api-python
|
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
import sys
import user_service_sdk.api.user_admin.alter_password_pb2
import google.protobuf.empty_pb2
import user_service_sdk.api.user_admin.alter_self_password_pb2
import user_service_sdk.api.user_admin.forgot_password_pb2
import user_service_sdk.api.user_admin.get_password_conf_pb2
import user_service_sdk.api.user_admin.get_user_info_pb2
import google.protobuf.struct_pb2
import user_service_sdk.api.user_admin.list_groups_id_name_pb2
import user_service_sdk.api.user_admin.list_users_pb2
import user_service_sdk.api.user_admin.list_users_id_nick_pb2
import user_service_sdk.api.user_admin.reset_password_pb2
import user_service_sdk.api.user_admin.search_all_user_group_pb2
import user_service_sdk.api.user_admin.search_all_users_pb2
import user_service_sdk.api.user_admin.user_delete_pb2
import user_service_sdk.api.user_admin.user_login_info_pb2
import user_service_sdk.api.user_admin.user_register_pb2
import user_service_sdk.utils.http_util
import google.protobuf.json_format
class UserAdminClient(object):
    """HTTP client for the easyops ``user_service.user_admin`` API contract."""

    def __init__(self, server_ip="", server_port=0, service_name="", host=""):
        """
        Initialize the client.

        :param server_ip: server ip the sdk sends requests to; when empty,
            requests are routed through the naming service
        :param server_port: server port the sdk sends requests to, used
            together with server_ip; when empty, requests are routed through
            the naming service
        :param service_name: service name the sdk sends requests to; when
            empty, requests are routed by contract name. If both server_ip and
            service_name are set, server_ip takes priority
        :param host: host name of the requested service,
            e.g. cmdb.easyops-only.com
        """
        # server_ip and server_port must be given together (both set or both
        # empty); anything else is a configuration error.
        if server_ip == "" and server_port != 0 or server_ip != "" and server_port == 0:
            raise Exception("server_ip和server_port必须同时指定")
        self._server_ip = server_ip
        self._server_port = server_port
        self._service_name = service_name
        self._host = host

    def _invoke(self, method, contract_name, uri, request, response, org, user,
                timeout, unwrap_data):
        """
        Perform one API request and parse the JSON reply into ``response``.

        Shared implementation for every public method of this client; it
        preserves the exact routing and parsing behavior those methods had
        individually.

        :param method: HTTP verb ("GET", "POST", "DELETE", ...)
        :param contract_name: fully-qualified API contract name used as the
            route when no explicit service_name is configured
        :param uri: request path (already formatted with any path params)
        :param request: protobuf request message, serialized into the params
        :param response: empty protobuf message instance to populate
        :param org: customer org id (a number)
        :param user: user name used for the api call
        :param timeout: call timeout in seconds
        :param unwrap_data: when True, parse ``rsp_obj["data"]`` into the
            response; when False, parse the whole reply object
        :return: the populated ``response`` message
        """
        headers = {"org": org, "user": user}
        # Routing priority: explicit service_name first; otherwise, when a
        # concrete server_ip is configured, fall back to the contract name;
        # with neither configured the route name stays empty.
        route_name = ""
        if self._service_name != "":
            route_name = self._service_name
        elif self._server_ip != "":
            route_name = contract_name
        rsp_obj = user_service_sdk.utils.http_util.do_api_request(
            method=method,
            src_name="logic.user_service_sdk",
            dst_name=route_name,
            server_ip=self._server_ip,
            server_port=self._server_port,
            host=self._host,
            uri=uri,
            params=google.protobuf.json_format.MessageToDict(
                request, preserving_proto_field_name=True),
            headers=headers,
            timeout=timeout,
        )
        payload = rsp_obj["data"] if unwrap_data else rsp_obj
        google.protobuf.json_format.ParseDict(
            payload, response, ignore_unknown_fields=True)
        return response

    def alter_password(self, request, org, user, timeout=10):
        # type: (user_service_sdk.api.user_admin.alter_password_pb2.AlterPasswordRequest, int, str, int) -> google.protobuf.empty_pb2.Empty
        """
        Change a user's password [internal].

        :param request: alter_password request
        :param org: customer org id (a number)
        :param user: user name used for the api call
        :param timeout: call timeout in seconds
        :return: google.protobuf.empty_pb2.Empty
        """
        return self._invoke(
            "POST",
            "easyops.api.user_service.user_admin.AlterPassword",
            "/api/v1/users/alter_password",
            request,
            google.protobuf.empty_pb2.Empty(),
            org, user, timeout,
            unwrap_data=False,
        )

    def alter_self_password(self, request, org, user, timeout=10):
        # type: (user_service_sdk.api.user_admin.alter_self_password_pb2.AlterSelfPasswordRequest, int, str, int) -> google.protobuf.empty_pb2.Empty
        """
        Change one's own password.

        :param request: alter_self_password request
        :param org: customer org id (a number)
        :param user: user name used for the api call
        :param timeout: call timeout in seconds
        :return: google.protobuf.empty_pb2.Empty
        """
        return self._invoke(
            "POST",
            "easyops.api.user_service.user_admin.AlterSelfPassword",
            "/api/v1/users/password",
            request,
            google.protobuf.empty_pb2.Empty(),
            org, user, timeout,
            unwrap_data=False,
        )

    def forgot_password(self, request, org, user, timeout=10):
        # type: (user_service_sdk.api.user_admin.forgot_password_pb2.ForgotPasswordRequest, int, str, int) -> google.protobuf.empty_pb2.Empty
        """
        Handle a user's forgotten password.

        :param request: forgot_password request
        :param org: customer org id (a number)
        :param user: user name used for the api call
        :param timeout: call timeout in seconds
        :return: google.protobuf.empty_pb2.Empty
        """
        return self._invoke(
            "POST",
            "easyops.api.user_service.user_admin.ForgotPassword",
            "/api/v1/users/password/forgot",
            request,
            google.protobuf.empty_pb2.Empty(),
            org, user, timeout,
            unwrap_data=False,
        )

    def get_password_config(self, request, org, user, timeout=10):
        # type: (google.protobuf.empty_pb2.Empty, int, str, int) -> user_service_sdk.api.user_admin.get_password_conf_pb2.GetPasswordConfigResponse
        """
        Get the password configuration.

        :param request: get_password_config request
        :param org: customer org id (a number)
        :param user: user name used for the api call
        :param timeout: call timeout in seconds
        :return: user_service_sdk.api.user_admin.get_password_conf_pb2.GetPasswordConfigResponse
        """
        return self._invoke(
            "GET",
            "easyops.api.user_service.user_admin.GetPasswordConfig",
            "/api/v1/users/passconf",
            request,
            user_service_sdk.api.user_admin.get_password_conf_pb2.GetPasswordConfigResponse(),
            org, user, timeout,
            unwrap_data=True,
        )

    def get_user_info(self, request, org, user, timeout=10):
        # type: (user_service_sdk.api.user_admin.get_user_info_pb2.GetUserInfoRequest, int, str, int) -> google.protobuf.struct_pb2.Struct
        """
        Get user information.

        :param request: get_user_info request
        :param org: customer org id (a number)
        :param user: user name used for the api call
        :param timeout: call timeout in seconds
        :return: google.protobuf.struct_pb2.Struct
        """
        uri = "/api/v1/users/detail/{username}".format(
            username=request.username,
        )
        return self._invoke(
            "GET",
            "easyops.api.user_service.user_admin.GetUserInfo",
            uri,
            request,
            google.protobuf.struct_pb2.Struct(),
            org, user, timeout,
            unwrap_data=True,
        )

    def list_groups_id_name(self, request, org, user, timeout=10):
        # type: (user_service_sdk.api.user_admin.list_groups_id_name_pb2.ListGroupsIdNameRequest, int, str, int) -> google.protobuf.struct_pb2.Struct
        """
        Get the id-to-name mapping for groups.

        :param request: list_groups_id_name request
        :param org: customer org id (a number)
        :param user: user name used for the api call
        :param timeout: call timeout in seconds
        :return: google.protobuf.struct_pb2.Struct
        """
        return self._invoke(
            "GET",
            "easyops.api.user_service.user_admin.ListGroupsIdName",
            "/api/v1/groups/id_map_name",
            request,
            google.protobuf.struct_pb2.Struct(),
            org, user, timeout,
            unwrap_data=True,
        )

    def list_users_info(self, request, org, user, timeout=10):
        # type: (user_service_sdk.api.user_admin.list_users_pb2.ListUsersInfoRequest, int, str, int) -> user_service_sdk.api.user_admin.list_users_pb2.ListUsersInfoResponse
        """
        List user information.

        :param request: list_users_info request
        :param org: customer org id (a number)
        :param user: user name used for the api call
        :param timeout: call timeout in seconds
        :return: user_service_sdk.api.user_admin.list_users_pb2.ListUsersInfoResponse
        """
        return self._invoke(
            "GET",
            "easyops.api.user_service.user_admin.ListUsersInfo",
            "/api/v1/users",
            request,
            user_service_sdk.api.user_admin.list_users_pb2.ListUsersInfoResponse(),
            org, user, timeout,
            unwrap_data=True,
        )

    def list_users_id_nick(self, request, org, user, timeout=10):
        # type: (user_service_sdk.api.user_admin.list_users_id_nick_pb2.ListUsersIdNickRequest, int, str, int) -> google.protobuf.struct_pb2.Struct
        """
        Get the user name-to-nickname mapping.

        :param request: list_users_id_nick request
        :param org: customer org id (a number)
        :param user: user name used for the api call
        :param timeout: call timeout in seconds
        :return: google.protobuf.struct_pb2.Struct
        """
        return self._invoke(
            "GET",
            "easyops.api.user_service.user_admin.ListUsersIdNick",
            "/api/v1/users/id_map_nickname",
            request,
            google.protobuf.struct_pb2.Struct(),
            org, user, timeout,
            unwrap_data=True,
        )

    def reset_password(self, request, org, user, timeout=10):
        # type: (user_service_sdk.api.user_admin.reset_password_pb2.ResetPasswordRequest, int, str, int) -> google.protobuf.empty_pb2.Empty
        """
        Reset a user's password.

        :param request: reset_password request
        :param org: customer org id (a number)
        :param user: user name used for the api call
        :param timeout: call timeout in seconds
        :return: google.protobuf.empty_pb2.Empty
        """
        return self._invoke(
            "POST",
            "easyops.api.user_service.user_admin.ResetPassword",
            "/api/v1/users/password/reset",
            request,
            google.protobuf.empty_pb2.Empty(),
            org, user, timeout,
            unwrap_data=False,
        )

    def search_all_user_group(self, request, org, user, timeout=10):
        # type: (user_service_sdk.api.user_admin.search_all_user_group_pb2.SearchAllUserGroupRequest, int, str, int) -> user_service_sdk.api.user_admin.search_all_user_group_pb2.SearchAllUserGroupResponse
        """
        Search all user groups.

        :param request: search_all_user_group request
        :param org: customer org id (a number)
        :param user: user name used for the api call
        :param timeout: call timeout in seconds
        :return: user_service_sdk.api.user_admin.search_all_user_group_pb2.SearchAllUserGroupResponse
        """
        return self._invoke(
            "POST",
            "easyops.api.user_service.user_admin.SearchAllUserGroup",
            "/api/v1/users/group/all",
            request,
            user_service_sdk.api.user_admin.search_all_user_group_pb2.SearchAllUserGroupResponse(),
            org, user, timeout,
            unwrap_data=True,
        )

    def search_all_users_info(self, request, org, user, timeout=10):
        # type: (user_service_sdk.api.user_admin.search_all_users_pb2.SearchAllUsersInfoRequest, int, str, int) -> user_service_sdk.api.user_admin.search_all_users_pb2.SearchAllUsersInfoResponse
        """
        Search all users' information.

        :param request: search_all_users_info request
        :param org: customer org id (a number)
        :param user: user name used for the api call
        :param timeout: call timeout in seconds
        :return: user_service_sdk.api.user_admin.search_all_users_pb2.SearchAllUsersInfoResponse
        """
        return self._invoke(
            "POST",
            "easyops.api.user_service.user_admin.SearchAllUsersInfo",
            "/api/v1/users/all",
            request,
            user_service_sdk.api.user_admin.search_all_users_pb2.SearchAllUsersInfoResponse(),
            org, user, timeout,
            unwrap_data=True,
        )

    def user_delete(self, request, org, user, timeout=10):
        # type: (user_service_sdk.api.user_admin.user_delete_pb2.UserDeleteRequest, int, str, int) -> google.protobuf.empty_pb2.Empty
        """
        Delete a user [internal].

        :param request: user_delete request
        :param org: customer org id (a number)
        :param user: user name used for the api call
        :param timeout: call timeout in seconds
        :return: google.protobuf.empty_pb2.Empty
        """
        uri = "/api/v1/users/{username}".format(
            username=request.username,
        )
        return self._invoke(
            "DELETE",
            "easyops.api.user_service.user_admin.UserDelete",
            uri,
            request,
            google.protobuf.empty_pb2.Empty(),
            org, user, timeout,
            unwrap_data=False,
        )

    def get_user_login_info(self, request, org, user, timeout=10):
        # type: (user_service_sdk.api.user_admin.user_login_info_pb2.GetUserLoginInfoRequest, int, str, int) -> user_service_sdk.api.user_admin.user_login_info_pb2.GetUserLoginInfoResponse
        """
        Query a user's login information.

        :param request: get_user_login_info request
        :param org: customer org id (a number)
        :param user: user name used for the api call
        :param timeout: call timeout in seconds
        :return: user_service_sdk.api.user_admin.user_login_info_pb2.GetUserLoginInfoResponse
        """
        return self._invoke(
            "GET",
            "easyops.api.user_service.user_admin.GetUserLoginInfo",
            "/api/v1/user/login_info",
            request,
            user_service_sdk.api.user_admin.user_login_info_pb2.GetUserLoginInfoResponse(),
            org, user, timeout,
            unwrap_data=True,
        )

    def user_register(self, request, org, user, timeout=10):
        # type: (user_service_sdk.api.user_admin.user_register_pb2.UserRegisterRequest, int, str, int) -> user_service_sdk.api.user_admin.user_register_pb2.UserRegisterResponse
        """
        Register a user [internal].

        :param request: user_register request
        :param org: customer org id (a number)
        :param user: user name used for the api call
        :param timeout: call timeout in seconds
        :return: user_service_sdk.api.user_admin.user_register_pb2.UserRegisterResponse
        """
        return self._invoke(
            "POST",
            "easyops.api.user_service.user_admin.UserRegister",
            "/api/v1/users/register",
            request,
            user_service_sdk.api.user_admin.user_register_pb2.UserRegisterResponse(),
            org, user, timeout,
            unwrap_data=True,
        )
| 38.341385
| 204
| 0.625661
| 2,727
| 23,810
| 5.120645
| 0.064173
| 0.069321
| 0.074191
| 0.054784
| 0.87389
| 0.867588
| 0.862504
| 0.860284
| 0.837726
| 0.813807
| 0
| 0.006899
| 0.281604
| 23,810
| 620
| 205
| 38.403226
| 0.809471
| 0.207518
| 0
| 0.765152
| 0
| 0
| 0.087347
| 0.075292
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037879
| false
| 0.05303
| 0.050505
| 0
| 0.126263
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
6edaad3ab088b634a88f433402a343d534c73f70
| 12,955
|
py
|
Python
|
app/mock/products.py
|
algorithm-ssau/coursework
|
9bc463ea2f096fda0a891e1cb26bbe34ae73da50
|
[
"MIT"
] | 4
|
2020-04-03T05:16:54.000Z
|
2020-06-04T18:39:40.000Z
|
app/mock/products.py
|
algorithm-ssau/coursework
|
9bc463ea2f096fda0a891e1cb26bbe34ae73da50
|
[
"MIT"
] | 16
|
2020-04-14T21:50:04.000Z
|
2021-06-14T01:16:41.000Z
|
app/mock/products.py
|
algorithm-ssau/coursework
|
9bc463ea2f096fda0a891e1cb26bbe34ae73da50
|
[
"MIT"
] | 1
|
2020-05-26T11:27:55.000Z
|
2020-05-26T11:27:55.000Z
|
# Mock catalogue of products currently on sale, used as backend-free fixture
# data. Each entry carries a numeric "id", a display "title", a marketing
# "description", a list of category "tags", and a "price" range with integer
# "min"/"max" values (currency unit not stated here — TODO confirm against
# the consuming views).
MOCK_SALE = [{
    "id": 1,
    "title": "Demix Magus M",
    "description": "Lightweight and flexible Demix Magus training sneakers. Special grooves on the sole, located in places of the anatomical fold of the foot, allow you to move naturally. Ultra-lightweight knitwear with a special weave provides air circulation and also effectively removes excess heat and moisture. EVA soles and modern upper materials guarantee a low weight model.", # pylint: disable=line-too-long
    "tags": ["Fashion", "Sneakers", "Shoes"],
    "price": {
        "min": 1126,
        "max": 1234
    }
}, {
    "id": 2,
    "title": "Nike Star Runner 2",
    "description": "Comfortable and functional Nike Star Runner 2 sneakers are great for running. The Phylon midsole effectively dampens shock loads. Outsole grooves for maximum natural movement. Rubber outsole for reliable grip. The upper is made of elastic breathable fabric, supplemented with leather elements, fixes the foot and guarantees comfort while running. Soft inserts in the ankle and on the tongue provide additional comfort.", # pylint: disable=line-too-long
    "tags": ["Fashion", "Men", "Autumn"],
    "price": {
        "min": 1126,
        "max": 2232
    }
}, {
    "id": 3,
    "title": "Skechers Go Run 600",
    "description": "Skechers Go Run 600 Divert Running Shoes The model is designed for neutral and hypopronization. Sole made of lightweight 5GEN material quickly restores shape after deformation, providing comfort and additional cushioning while running. The special GOGA Run insole has good cushioning properties. The antibacterial properties of the insole protect against unpleasant odors.", # pylint: disable=line-too-long
    "tags": ["Men", "Running", "Leather"],
    "price": {
        "min": 1156,
        "max": 1334
    }
}, {
    "id": 4,
    "title": "Skechers Dynamight 2.0-Rayhill",
    "description": "Comfortable and soft Dynimate 2 sneakers from Skechers are the optimal combination of comfort and original design. A special Memory Foam insole made of soft foam follows the contour of the foot, providing additional comfort, and quickly regains shape after deformation. The upper of the shoe is made of well-ventilated textile material.", # pylint: disable=line-too-long
    "tags": ["Running", "Leather", "Summer"],
    "price": {
        "min": 1120,
        "max": 5234
    }
}, {
    "id": 5,
    "title": "Nike Tanjun",
    "description": "In Japanese, tanjun means simplicity. Nike Tanjun sporty men's sneakers mean simplicity at its best. A smooth, seamless upper provides comfortable wearing. Flexible textile upper for air circulation. Lightweight outsole provides good cushioning.", # pylint: disable=line-too-long
    "tags": ["Shoes", "Winter", "Men"],
    "price": {
        "min": 1826,
        "max": 2423
    }
}, {
    "id": 6,
    "title": "Puma Vista",
    "description": "Vista Mid WTR sneakers inspired by the legendary 70s collection. The warmed model in sports style is irreplaceable in cold weather. Fur lining perfectly protects from cold. Anatomical insole SoftFoam + provides maximum comfort. Combination of genuine leather and suede for a spectacular look. Insole with antibacterial impregnation for freshness of the legs.", # pylint: disable=line-too-long
    "tags": ["Winter", "Men", "Running"],
    "price": {
        # NOTE(review): min exceeds max for this item — looks like a data
        # entry slip; confirm intended range before relying on it.
        "min": 1626,
        "max": 1274
    }
}, {
    "id": 7,
    "title": "Fila Tornado 3.0",
    "description": "Comfortable Fila TORNADO LOW 3.0 sneakers are perfect for sports lovers. Slip design allows for quick change of shoes. Anatomical insole for added comfort. The EVA midsole effectively dampens shock loads while walking. An improved sole protector for reliable traction. Reflectors will make walking in the dark or in bad weather safer. Sole material has high wear-resistant properties.", # pylint: disable=line-too-long
    "tags": ["Fashion", "Winter", "Shoes"],
    "price": {
        "min": 1196,
        "max": 1234
    }
}, {
    "id": 8,
    "title": "Fila Ray",
    "description": "Light and comfortable Fila Ray sneakers are made in a modern sports style with retro and classic elements. The EVA midsole absorbs shock when walking. Sole material has high wear-resistant properties. EVA outsole ensures low shoe weight.", # pylint: disable=line-too-long
    "tags": ["Winter", "Autumn", "Men"],
    "price": {
        "min": 1526,
        "max": 1634
    }
}, {
    "id": 9,
    "title": "Kappa Neoclassic 2.0",
    "description": "The warmed Kappa Neoclassic 2 sneakers perfectly complete the look in a sporty style and warm in cold weather. A tall model with a warm fleece lining helps keep you warm. Genuine leather looks great and comfortable to wear. EVA midsole material dampens shock when walking. Rubber outsole provides excellent traction. Thanks to a wearproof outsole, sneakers will last you longer.", # pylint: disable=line-too-long
    "tags": ["Shoes", "Winter", "Men"],
    "price": {
        "min": 1826,
        "max": 3294
    }
}, {
    "id": 10,
    "title": "ASICS Gel-Rocket 9",
    "description": "Asics GEL-ROCKET 9 volleyball shoes are the perfect combination of comfort and stability. The tech model will be a great choice for indoor games. Forefoot Gel gel inserts in the front part and EVA midsole effectively absorb shock loads. Mesh material for optimal air exchange and a comfortable microclimate. Trusstic technology for extra foot support. A removable EVA footbed ensures comfortable foot position.", # pylint: disable=line-too-long
    "tags": ["Fashion", "Sneakers", "Shoes"],
    "price": {
        "min": 2126,
        "max": 2234
    }
}, {
    "id": 11,
    "title": "Nike Zoom Zero",
    "description": "The modified Nike Court Air Zoom Zero tennis sneakers are a great choice for games and workouts. The full-length insert of the Nike Zoom Air sneakers provides excellent cushioning. The one-piece upper provides air exchange and a comfortable fit. A well thought-out lacing system for a good fit.", # pylint: disable=line-too-long
    "tags": ["Summer", "Men", "Fashion"],
    "price": {
        "min": 6126,
        "max": 9234
    }
}, {
    "id": 12,
    "title": "Puma Wired",
    "description": "Comfortable Puma sneakers for a sporty outing. SoftFoam + foam insole for comfort and extra cushioning. The top is made of mesh breathable material. IMEVA wavy midsole absorbs shock loads.", # pylint: disable=line-too-long
    "tags": ["Fashion", "Men", "Autumn"],
    "price": {
        "min": 2126,
        "max": 3244
    }
}]
# Mock catalogue for the "explore" section; same item schema as MOCK_SALE
# (numeric "id", "title", "description", "tags", and a "price" range), but
# here each price range collapses to a single value (min == max), i.e. no
# discount is applied.
MOCK_EXPLORE = [{
    "id": 1,
    "title": "Demix Magus M",
    "description": "Lightweight and flexible Demix Magus training sneakers. Special grooves on the sole, located in places of the anatomical fold of the foot, allow you to move naturally. Ultra-lightweight knitwear with a special weave provides air circulation and also effectively removes excess heat and moisture. EVA soles and modern upper materials guarantee a low weight model.", # pylint: disable=line-too-long
    "tags": ["Fashion", "Sneakers", "Shoes"],
    "price": {
        "min": 1234,
        "max": 1234
    }
}, {
    "id": 2,
    "title": "Nike Star Runner 2",
    "description": "Comfortable and functional Nike Star Runner 2 sneakers are great for running. The Phylon midsole effectively dampens shock loads. Outsole grooves for maximum natural movement. Rubber outsole for reliable grip. The upper is made of elastic breathable fabric, supplemented with leather elements, fixes the foot and guarantees comfort while running. Soft inserts in the ankle and on the tongue provide additional comfort.", # pylint: disable=line-too-long
    "tags": ["Fashion", "Men", "Autumn"],
    "price": {
        "min": 2232,
        "max": 2232
    }
}, {
    "id": 3,
    "title": "Skechers Go Run 600",
    "description": "Skechers Go Run 600 Divert Running Shoes The model is designed for neutral and hypopronization. Sole made of lightweight 5GEN material quickly restores shape after deformation, providing comfort and additional cushioning while running. The special GOGA Run insole has good cushioning properties. The antibacterial properties of the insole protect against unpleasant odors.", # pylint: disable=line-too-long
    "tags": ["Men", "Running", "Leather"],
    "price": {
        "min": 1334,
        "max": 1334
    }
}, {
    "id": 4,
    "title": "Skechers Dynamight 2.0-Rayhill",
    "description": "Comfortable and soft Dynimate 2 sneakers from Skechers are the optimal combination of comfort and original design. A special Memory Foam insole made of soft foam follows the contour of the foot, providing additional comfort, and quickly regains shape after deformation. The upper of the shoe is made of well-ventilated textile material.", # pylint: disable=line-too-long
    "tags": ["Running", "Leather", "Summer"],
    "price": {
        "min": 5234,
        "max": 5234
    }
}, {
    "id": 5,
    "title": "Nike Tanjun",
    "description": "In Japanese, tanjun means simplicity. Nike Tanjun sporty men's sneakers mean simplicity at its best. A smooth, seamless upper provides comfortable wearing. Flexible textile upper for air circulation. Lightweight outsole provides good cushioning.", # pylint: disable=line-too-long
    "tags": ["Shoes", "Winter", "Men"],
    "price": {
        "min": 2423,
        "max": 2423
    }
}, {
    "id": 6,
    "title": "Puma Vista",
    "description": "Vista Mid WTR sneakers inspired by the legendary 70s collection. The warmed model in sports style is irreplaceable in cold weather. Fur lining perfectly protects from cold. Anatomical insole SoftFoam + provides maximum comfort. Combination of genuine leather and suede for a spectacular look. Insole with antibacterial impregnation for freshness of the legs.", # pylint: disable=line-too-long
    "tags": ["Winter", "Men", "Running"],
    "price": {
        "min": 1274,
        "max": 1274
    }
}, {
    "id": 7,
    "title": "Fila Tornado 3.0",
    "description": "Comfortable Fila TORNADO LOW 3.0 sneakers are perfect for sports lovers. Slip design allows for quick change of shoes. Anatomical insole for added comfort. The EVA midsole effectively dampens shock loads while walking. An improved sole protector for reliable traction. Reflectors will make walking in the dark or in bad weather safer. Sole material has high wear-resistant properties.", # pylint: disable=line-too-long
    "tags": ["Fashion", "Winter", "Shoes"],
    "price": {
        "min": 1234,
        "max": 1234
    }
}, {
    "id": 8,
    "title": "Fila Ray",
    "description": "Light and comfortable Fila Ray sneakers are made in a modern sports style with retro and classic elements. The EVA midsole absorbs shock when walking. Sole material has high wear-resistant properties. EASY EVA outsole ensures low shoe weight.", # pylint: disable=line-too-long
    "tags": ["Winter", "Autumn", "Men"],
    "price": {
        "min": 1634,
        "max": 1634
    }
}, {
    "id": 9,
    "title": "Kappa Neoclassic 2.0",
    "description": "The warmed Kappa Neoclassic 2 sneakers perfectly complete the look in a sporty style and warm in cold weather. A tall model with a warm fleece lining helps keep you warm. Genuine leather looks great and comfortable to wear. EVA midsole material dampens shock when walking. Rubber outsole provides excellent traction. Thanks to a wearproof outsole, sneakers will last you longer.", # pylint: disable=line-too-long
    "tags": ["Shoes", "Winter", "Men"],
    "price": {
        "min": 3294,
        "max": 3294
    }
}, {
    # NOTE(review): ids 10 and 11 share the "Nike Zoom Zero" title here,
    # while MOCK_SALE's id 10 is "ASICS Gel-Rocket 9" with the same max
    # price (2234) — presumably a copy-paste slip; confirm before reuse.
    "id": 10,
    "title": "Nike Zoom Zero",
    "description": "The modified Nike Court Air Zoom Zero tennis sneakers are a great choice for games and workouts. The full-length insert of the Nike Zoom Air sneakers provides excellent cushioning. The one-piece upper provides air exchange and a comfortable fit. A well thought-out lacing system for a good fit.", # pylint: disable=line-too-long
    "tags": ["Summer", "Men", "Fashion"],
    "price": {
        "min": 2234,
        "max": 2234
    }
}, {
    "id": 11,
    "title": "Nike Zoom Zero",
    "description": "The modified Nike Court Air Zoom Zero tennis sneakers are a great choice for games and workouts. The full-length insert of the Nike Zoom Air sneakers provides excellent cushioning. The one-piece upper provides air exchange and a comfortable fit. A well thought-out lacing system for a good fit.", # pylint: disable=line-too-long
    "tags": ["Summer", "Men", "Fashion"],
    "price": {
        "min": 9234,
        "max": 9234
    }
}, {
    "id": 12,
    "title": "Puma Wired",
    "description": "Comfortable Puma sneakers for a sporty outing. SoftFoam + foam insole for comfort and extra cushioning. The top is made of mesh breathable material. IMEVA wavy midsole absorbs shock loads.", # pylint: disable=line-too-long
    "tags": ["Fashion", "Men", "Autumn"],
    "price": {
        "min": 3244,
        "max": 3244
    }
}]
| 58.886364
| 473
| 0.683057
| 1,725
| 12,955
| 5.128696
| 0.183188
| 0.035266
| 0.046117
| 0.054256
| 0.948231
| 0.942579
| 0.942579
| 0.941675
| 0.941675
| 0.941675
| 0
| 0.026017
| 0.210807
| 12,955
| 219
| 474
| 59.155251
| 0.8393
| 0.0555
| 0
| 0.788991
| 0
| 0.110092
| 0.771026
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6eedb184ddd6055943755d14318941e72f227ab0
| 75
|
py
|
Python
|
vindinium/bots/__init__.py
|
IntelliAgent/vindinium-python-bot
|
0f101740ecaabf300d1f3768e236dfe9cae966b7
|
[
"WTFPL"
] | null | null | null |
vindinium/bots/__init__.py
|
IntelliAgent/vindinium-python-bot
|
0f101740ecaabf300d1f3768e236dfe9cae966b7
|
[
"WTFPL"
] | null | null | null |
vindinium/bots/__init__.py
|
IntelliAgent/vindinium-python-bot
|
0f101740ecaabf300d1f3768e236dfe9cae966b7
|
[
"WTFPL"
] | null | null | null |
from .raw_bot import *
from .base_bot import *
from .russelbot import *
| 10.714286
| 24
| 0.72
| 11
| 75
| 4.727273
| 0.545455
| 0.346154
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 75
| 6
| 25
| 12.5
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
42bca4acad9cf3382fe8460504bd0db5203ae581
| 31,527
|
py
|
Python
|
market_data/test/test_replayed_market_data.py
|
alphamatic/amp
|
5018137097159415c10eaa659a2e0de8c4e403d4
|
[
"BSD-3-Clause"
] | 5
|
2021-08-10T23:16:44.000Z
|
2022-03-17T17:27:00.000Z
|
market_data/test/test_replayed_market_data.py
|
alphamatic/amp
|
5018137097159415c10eaa659a2e0de8c4e403d4
|
[
"BSD-3-Clause"
] | 330
|
2021-06-10T17:28:22.000Z
|
2022-03-31T00:55:48.000Z
|
market_data/test/test_replayed_market_data.py
|
alphamatic/amp
|
5018137097159415c10eaa659a2e0de8c4e403d4
|
[
"BSD-3-Clause"
] | 6
|
2021-06-10T17:20:32.000Z
|
2022-03-28T08:08:03.000Z
|
import logging
from typing import Any, Callable, Tuple
import pandas as pd
import helpers.hasyncio as hasynci
import helpers.hprint as hprint
import helpers.hunit_test as hunitest
import market_data.market_data_example as mdmadaex
import market_data.replayed_market_data as mdremada
_LOG = logging.getLogger(__name__)
def _check_get_data(
    self_: Any,
    initial_replayed_delay: int,
    func: Callable,
    expected_df_as_str: str,
) -> mdremada.ReplayedMarketData:
    """
    Build a `ReplayedTimeMarketData`, run a `get_data*` function on it, and
    compare the rendered output against a golden string.

    :param self_: the calling test case (used for `assert_equal`)
    :param initial_replayed_delay: offset of the replayed clock, in minutes
    :param func: callable receiving the market data object and returning a df
    :param expected_df_as_str: expected short-string rendering of the df
    :return: the `ReplayedMarketData` object that was built
    """
    with hasynci.solipsism_context() as event_loop:
        # Build the replayed market data over a fixed one-hour window.
        window_start = pd.Timestamp("2000-01-01 09:30:00-05:00")
        window_end = pd.Timestamp("2000-01-01 10:29:00-05:00")
        # TODO(gp): initial_replayed_delay -> initial_delay_in_mins (or in secs).
        market_data, _ = mdmadaex.get_ReplayedTimeMarketData_example2(
            event_loop,
            window_start,
            window_end,
            initial_replayed_delay,
            [1000],
        )
        # Execute the function under test.
        result_df = func(market_data)
        # Normalize the column order so the rendering is deterministic.
        result_df = result_df[sorted(result_df.columns)]
        actual_df_as_str = hprint.df_to_short_str("df", result_df)
        _LOG.info("-> %s", actual_df_as_str)
        self_.assert_equal(
            actual_df_as_str,
            expected_df_as_str,
            dedent=True,
            fuzzy_match=True,
        )
        return market_data
class TestReplayedMarketData1(hunitest.TestCase):
    """
    Test `ReplayedMarketData.get_data_for_last_period()` for various replayed
    clock offsets, comparing against golden dataframe renderings.
    """

    def check_last_end_time(
        self,
        market_data: mdremada.ReplayedMarketData,
        expected_last_end_time: pd.Timestamp,
        expected_is_online: bool,
    ) -> None:
        """
        Check output of `get_last_end_time()` and `is_online()`.
        """
        #
        last_end_time = market_data.get_last_end_time()
        _LOG.info("-> last_end_time=%s", last_end_time)
        self.assertEqual(last_end_time, expected_last_end_time)
        #
        is_online = market_data.is_online()
        _LOG.info("-> is_online=%s", is_online)
        self.assertEqual(is_online, expected_is_online)

    def test_get_data1(self) -> None:
        """
        - Set the current time to 9:35
        - Get the last 5 mins of data
        - The returned data should be in [9:30, 9:35]
        """
        initial_replayed_delay = 5
        #
        period = "last_5mins"
        normalize_data = True
        func = lambda market_data: market_data.get_data_for_last_period(
            period, normalize_data=normalize_data
        )
        # pylint: disable=line-too-long
        expected_df_as_str = """
# df=
df.index in [2000-01-01 09:31:00-05:00, 2000-01-01 09:35:00-05:00]
df.columns=asset_id,last_price,start_datetime,timestamp_db
df.shape=(5, 4)
asset_id last_price start_datetime timestamp_db
end_datetime
2000-01-01 09:31:00-05:00 1000 999.874540 2000-01-01 09:30:00-05:00 2000-01-01 09:31:00-05:00
2000-01-01 09:32:00-05:00 1000 1000.325254 2000-01-01 09:31:00-05:00 2000-01-01 09:32:00-05:00
2000-01-01 09:33:00-05:00 1000 1000.557248 2000-01-01 09:32:00-05:00 2000-01-01 09:33:00-05:00
...
2000-01-01 09:33:00-05:00 1000 1000.557248 2000-01-01 09:32:00-05:00 2000-01-01 09:33:00-05:00
2000-01-01 09:34:00-05:00 1000 1000.655907 2000-01-01 09:33:00-05:00 2000-01-01 09:34:00-05:00
2000-01-01 09:35:00-05:00 1000 1000.311925 2000-01-01 09:34:00-05:00 2000-01-01 09:35:00-05:00"""
        # pylint: enable=line-too-long
        market_data = _check_get_data(
            self, initial_replayed_delay, func, expected_df_as_str
        )
        #
        expected_last_end_time = pd.Timestamp("2000-01-01 09:35:00-05:00")
        expected_is_online = True
        self.check_last_end_time(
            market_data, expected_last_end_time, expected_is_online
        )

    def test_get_data2(self) -> None:
        """
        Same as test_get_data1() but with normalize_data=False.
        """
        initial_replayed_delay = 5
        #
        period = "last_5mins"
        normalize_data = False
        func = lambda market_data: market_data.get_data_for_last_period(
            period, normalize_data=normalize_data
        )
        # pylint: disable=line-too-long
        expected_df_as_str = """
# df=
df.index in [0, 4]
df.columns=asset_id,end_datetime,last_price,start_datetime,timestamp_db
df.shape=(5, 5)
asset_id end_datetime last_price start_datetime timestamp_db
0 1000 2000-01-01 09:31:00-05:00 999.874540 2000-01-01 09:30:00-05:00 2000-01-01 09:31:00-05:00
1 1000 2000-01-01 09:32:00-05:00 1000.325254 2000-01-01 09:31:00-05:00 2000-01-01 09:32:00-05:00
2 1000 2000-01-01 09:33:00-05:00 1000.557248 2000-01-01 09:32:00-05:00 2000-01-01 09:33:00-05:00
...
2 1000 2000-01-01 09:33:00-05:00 1000.557248 2000-01-01 09:32:00-05:00 2000-01-01 09:33:00-05:00
3 1000 2000-01-01 09:34:00-05:00 1000.655907 2000-01-01 09:33:00-05:00 2000-01-01 09:34:00-05:00
4 1000 2000-01-01 09:35:00-05:00 1000.311925 2000-01-01 09:34:00-05:00 2000-01-01 09:35:00-05:00"""
        # pylint: enable=line-too-long
        market_data = _check_get_data(
            self, initial_replayed_delay, func, expected_df_as_str
        )
        #
        expected_last_end_time = pd.Timestamp("2000-01-01 09:35:00-05:00")
        expected_is_online = True
        self.check_last_end_time(
            market_data, expected_last_end_time, expected_is_online
        )

    def test_get_data3(self) -> None:
        """
        - Set the current time to 9:35
        - Get the last 1 min of data
        - The returned data should be at 9:35
        """
        initial_replayed_delay = 5
        #
        period = "last_1min"
        normalize_data = True
        func = lambda market_data: market_data.get_data_for_last_period(
            period, normalize_data=normalize_data
        )
        # pylint: disable=line-too-long
        expected_df_as_str = """
# df=
df.index in [2000-01-01 09:35:00-05:00, 2000-01-01 09:35:00-05:00]
df.columns=asset_id,last_price,start_datetime,timestamp_db
df.shape=(1, 4)
asset_id last_price start_datetime timestamp_db
end_datetime
2000-01-01 09:35:00-05:00 1000 1000.311925 2000-01-01 09:34:00-05:00 2000-01-01 09:35:00-05:00"""
        # pylint: enable=line-too-long
        market_data = _check_get_data(
            self, initial_replayed_delay, func, expected_df_as_str
        )
        #
        expected_last_end_time = pd.Timestamp("2000-01-01 09:35:00-05:00")
        expected_is_online = True
        self.check_last_end_time(
            market_data, expected_last_end_time, expected_is_online
        )

    def test_get_data4(self) -> None:
        """
        - Set the current time to 9:50
        - Get the last 10 mins of data
        - The returned data should be in [9:40, 9:50]
        """
        initial_replayed_delay = 20
        #
        period = "last_10mins"
        normalize_data = True
        func = lambda market_data: market_data.get_data_for_last_period(
            period, normalize_data=normalize_data
        )
        # pylint: disable=line-too-long
        expected_df_as_str = """
# df=
df.index in [2000-01-01 09:41:00-05:00, 2000-01-01 09:50:00-05:00]
df.columns=asset_id,last_price,start_datetime,timestamp_db
df.shape=(10, 4)
asset_id last_price start_datetime timestamp_db
end_datetime
2000-01-01 09:41:00-05:00 1000 999.721952 2000-01-01 09:40:00-05:00 2000-01-01 09:41:00-05:00
2000-01-01 09:42:00-05:00 1000 1000.191862 2000-01-01 09:41:00-05:00 2000-01-01 09:42:00-05:00
2000-01-01 09:43:00-05:00 1000 1000.524304 2000-01-01 09:42:00-05:00 2000-01-01 09:43:00-05:00
...
2000-01-01 09:48:00-05:00 1000 999.430872 2000-01-01 09:47:00-05:00 2000-01-01 09:48:00-05:00
2000-01-01 09:49:00-05:00 1000 999.362817 2000-01-01 09:48:00-05:00 2000-01-01 09:49:00-05:00
2000-01-01 09:50:00-05:00 1000 999.154046 2000-01-01 09:49:00-05:00 2000-01-01 09:50:00-05:00"""
        # pylint: enable=line-too-long
        market_data = _check_get_data(
            self, initial_replayed_delay, func, expected_df_as_str
        )
        #
        expected_last_end_time = pd.Timestamp("2000-01-01 09:50:00-05:00")
        expected_is_online = True
        self.check_last_end_time(
            market_data, expected_last_end_time, expected_is_online
        )

    def test_get_data5(self) -> None:
        """
        - Set the current time to 10:00
        - Get data for the last day
        - The returned data should be in [9:30, 10:00]
        """
        initial_replayed_delay = 30
        #
        period = "last_day"
        normalize_data = True
        func = lambda market_data: market_data.get_data_for_last_period(
            period, normalize_data=normalize_data
        )
        # pylint: disable=line-too-long
        expected_df_as_str = """
# df=
df.index in [2000-01-01 09:31:00-05:00, 2000-01-01 10:00:00-05:00]
df.columns=asset_id,last_price,start_datetime,timestamp_db
df.shape=(30, 4)
asset_id last_price start_datetime timestamp_db
end_datetime
2000-01-01 09:31:00-05:00 1000 999.874540 2000-01-01 09:30:00-05:00 2000-01-01 09:31:00-05:00
2000-01-01 09:32:00-05:00 1000 1000.325254 2000-01-01 09:31:00-05:00 2000-01-01 09:32:00-05:00
2000-01-01 09:33:00-05:00 1000 1000.557248 2000-01-01 09:32:00-05:00 2000-01-01 09:33:00-05:00
...
2000-01-01 09:58:00-05:00 1000 998.519053 2000-01-01 09:57:00-05:00 2000-01-01 09:58:00-05:00
2000-01-01 09:59:00-05:00 1000 998.611468 2000-01-01 09:58:00-05:00 2000-01-01 09:59:00-05:00
2000-01-01 10:00:00-05:00 1000 998.157918 2000-01-01 09:59:00-05:00 2000-01-01 10:00:00-05:00"""
        # pylint: enable=line-too-long
        market_data = _check_get_data(
            self, initial_replayed_delay, func, expected_df_as_str
        )
        #
        expected_last_end_time = pd.Timestamp("2000-01-01 10:00:00-05:00")
        expected_is_online = True
        self.check_last_end_time(
            market_data, expected_last_end_time, expected_is_online
        )

    def test_get_data6(self) -> None:
        """
        - Set the current time to 10:00
        - Get all data for specified period
        - The returned data should be in [9:30, 10:00]
        """
        initial_replayed_delay = 30
        #
        period = "all"
        normalize_data = True
        func = lambda market_data: market_data.get_data_for_last_period(
            period, normalize_data=normalize_data
        )
        # pylint: disable=line-too-long
        expected_df_as_str = """
# df=
df.index in [2000-01-01 09:31:00-05:00, 2000-01-01 10:00:00-05:00]
df.columns=asset_id,last_price,start_datetime,timestamp_db
df.shape=(30, 4)
asset_id last_price start_datetime timestamp_db
end_datetime
2000-01-01 09:31:00-05:00 1000 999.874540 2000-01-01 09:30:00-05:00 2000-01-01 09:31:00-05:00
2000-01-01 09:32:00-05:00 1000 1000.325254 2000-01-01 09:31:00-05:00 2000-01-01 09:32:00-05:00
2000-01-01 09:33:00-05:00 1000 1000.557248 2000-01-01 09:32:00-05:00 2000-01-01 09:33:00-05:00
...
2000-01-01 09:58:00-05:00 1000 998.519053 2000-01-01 09:57:00-05:00 2000-01-01 09:58:00-05:00
2000-01-01 09:59:00-05:00 1000 998.611468 2000-01-01 09:58:00-05:00 2000-01-01 09:59:00-05:00
2000-01-01 10:00:00-05:00 1000 998.157918 2000-01-01 09:59:00-05:00 2000-01-01 10:00:00-05:00"""
        # pylint: enable=line-too-long
        market_data = _check_get_data(
            self, initial_replayed_delay, func, expected_df_as_str
        )
        #
        expected_last_end_time = pd.Timestamp("2000-01-01 10:00:00-05:00")
        expected_is_online = True
        self.check_last_end_time(
            market_data, expected_last_end_time, expected_is_online
        )

    def test_get_data_for_minute_0(self) -> None:
        """
        The replayed time starts at the same time of the data to represent the
        first minute of trading.
        """
        initial_replayed_delay = 0
        #
        period = "last_5mins"
        normalize_data = True
        func = lambda market_data: market_data.get_data_for_last_period(
            period, normalize_data=normalize_data
        )
        # Check.
        # With zero delay no bar has completed yet, so the df is empty.
        expected_df_as_str = """
# df=
df.shape=(0, 4)
Empty DataFrame
Columns: [asset_id, last_price, start_datetime, timestamp_db]
Index: []"""
        market_data = _check_get_data(
            self, initial_replayed_delay, func, expected_df_as_str
        )
        #
        expected_last_end_time = None
        expected_is_online = False
        self.check_last_end_time(
            market_data, expected_last_end_time, expected_is_online
        )

    def test_get_data_for_minute_1(self) -> None:
        """
        The replayed time starts one minute after the data to represent the
        first minute of trading.
        """
        initial_replayed_delay = 1
        period = "last_5mins"
        normalize_data = True
        func = lambda market_data: market_data.get_data_for_last_period(
            period, normalize_data=normalize_data
        )
        # pylint: disable=line-too-long
        expected_df_as_str = """
# df=
df.index in [2000-01-01 09:31:00-05:00, 2000-01-01 09:31:00-05:00]
df.columns=asset_id,last_price,start_datetime,timestamp_db
df.shape=(1, 4)
asset_id last_price start_datetime timestamp_db
end_datetime
2000-01-01 09:31:00-05:00 1000 999.87454 2000-01-01 09:30:00-05:00 2000-01-01 09:31:00-05:00"""
        # pylint: enable=line-too-long
        market_data = _check_get_data(
            self, initial_replayed_delay, func, expected_df_as_str
        )
        #
        expected_last_end_time = pd.Timestamp("2000-01-01 09:31:00-0500")
        expected_is_online = True
        self.check_last_end_time(
            market_data, expected_last_end_time, expected_is_online
        )

    def test_get_data_for_minute_3(self) -> None:
        """
        The replayed time starts 3 minutes after the opening of the trading
        day.
        """
        initial_replayed_delay = 3
        #
        period = "last_5mins"
        normalize_data = True
        func = lambda market_data: market_data.get_data_for_last_period(
            period, normalize_data=normalize_data
        )
        # Check.
        # pylint: disable=line-too-long
        expected_df_as_str = """
# df=
df.index in [2000-01-01 09:31:00-05:00, 2000-01-01 09:33:00-05:00]
df.columns=asset_id,last_price,start_datetime,timestamp_db
df.shape=(3, 4)
asset_id last_price start_datetime timestamp_db
end_datetime
2000-01-01 09:31:00-05:00 1000 999.874540 2000-01-01 09:30:00-05:00 2000-01-01 09:31:00-05:00
2000-01-01 09:32:00-05:00 1000 1000.325254 2000-01-01 09:31:00-05:00 2000-01-01 09:32:00-05:00
2000-01-01 09:33:00-05:00 1000 1000.557248 2000-01-01 09:32:00-05:00 2000-01-01 09:33:00-05:00"""
        # pylint: enable=line-too-long
        market_data = _check_get_data(
            self, initial_replayed_delay, func, expected_df_as_str
        )
        #
        expected_last_end_time = pd.Timestamp("2000-01-01 09:33:00-05:00")
        expected_is_online = True
        self.check_last_end_time(
            market_data, expected_last_end_time, expected_is_online
        )

    def test_get_data_for_minute_6(self) -> None:
        """
        The replayed time starts 6 minutes after the opening of the trading
        day.
        """
        initial_replayed_delay = 6
        #
        period = "last_5mins"
        normalize_data = True
        func = lambda market_data: market_data.get_data_for_last_period(
            period, normalize_data=normalize_data
        )
        # Check.
        # pylint: disable=line-too-long
        expected_df_as_str = """# df=
df.index in [2000-01-01 09:32:00-05:00, 2000-01-01 09:36:00-05:00]
df.columns=asset_id,last_price,start_datetime,timestamp_db
df.shape=(5, 4)
asset_id last_price start_datetime timestamp_db
end_datetime
2000-01-01 09:32:00-05:00 1000 1000.325254 2000-01-01 09:31:00-05:00 2000-01-01 09:32:00-05:00
2000-01-01 09:33:00-05:00 1000 1000.557248 2000-01-01 09:32:00-05:00 2000-01-01 09:33:00-05:00
2000-01-01 09:34:00-05:00 1000 1000.655907 2000-01-01 09:33:00-05:00 2000-01-01 09:34:00-05:00
...
2000-01-01 09:34:00-05:00 1000 1000.655907 2000-01-01 09:33:00-05:00 2000-01-01 09:34:00-05:00
2000-01-01 09:35:00-05:00 1000 1000.311925 2000-01-01 09:34:00-05:00 2000-01-01 09:35:00-05:00
2000-01-01 09:36:00-05:00 1000 999.967920 2000-01-01 09:35:00-05:00 2000-01-01 09:36:00-05:00"""
        # pylint: enable=line-too-long
        market_data = _check_get_data(
            self, initial_replayed_delay, func, expected_df_as_str
        )
        #
        expected_last_end_time = pd.Timestamp("2000-01-01 09:36:00-05:00")
        expected_is_online = True
        self.check_last_end_time(
            market_data, expected_last_end_time, expected_is_online
        )

    def test_get_data_for_minute_63(self) -> None:
        """
        The replayed time starts 63 minutes after the opening of the trading
        day.
        """
        initial_replayed_delay = 63
        #
        period = "last_5mins"
        normalize_data = True
        func = lambda market_data: market_data.get_data_for_last_period(
            period, normalize_data=normalize_data
        )
        # Check.
        # pylint: disable=line-too-long
        expected_df_as_str = """# df=
df.index in [2000-01-01 10:29:00-05:00, 2000-01-01 10:30:00-05:00]
df.columns=asset_id,last_price,start_datetime,timestamp_db
df.shape=(2, 4)
asset_id last_price start_datetime timestamp_db
end_datetime
2000-01-01 10:29:00-05:00 1000 998.224716 2000-01-01 10:28:00-05:00 2000-01-01 10:29:00-05:00
2000-01-01 10:30:00-05:00 1000 998.050046 2000-01-01 10:29:00-05:00 2000-01-01 10:30:00-05:00"""
        # pylint: enable=line-too-long
        market_data = _check_get_data(
            self, initial_replayed_delay, func, expected_df_as_str
        )
        #
        expected_last_end_time = pd.Timestamp("2000-01-01 10:30:00-0500")
        # 63 minutes is past the end of the replayed window, so the feed is
        # no longer online.
        expected_is_online = False
        self.check_last_end_time(
            market_data, expected_last_end_time, expected_is_online
        )
# #############################################################################
class TestReplayedMarketData2(hunitest.TestCase):
    """
    Test `get_data_for_interval()` and `get_data_at_timestamp()` against
    golden dataframe renderings.
    """

    # TODO(gp): Add same tests for the SQL version.

    def test_get_data_for_interval1(self) -> None:
        """
        - Start replaying time 5 minutes after the beginning of the day, i.e., the
          current time is 9:35.
        - Ask data for [9:30, 9:45]
        - The returned data is [9:30, 9:35].
        """
        # Start replaying time 5 minutes after the beginning of the day, so the
        # current time is 9:35.
        initial_replayed_delay = 5
        # Ask data for 9:30 to 9:45.
        start_ts = pd.Timestamp("2000-01-01 09:30:00-05:00")
        end_ts = pd.Timestamp("2000-01-01 09:45:00-05:00")
        ts_col_name = "end_datetime"
        asset_ids = None
        normalize_data = True
        func = lambda market_data: market_data.get_data_for_interval(
            start_ts,
            end_ts,
            ts_col_name,
            asset_ids,
            normalize_data=normalize_data,
        )
        # pylint: disable=line-too-long
        expected_df_as_str = """
# df=
df.index in [2000-01-01 09:31:00-05:00, 2000-01-01 09:35:00-05:00]
df.columns=asset_id,last_price,start_datetime,timestamp_db
df.shape=(5, 4)
asset_id last_price start_datetime timestamp_db
end_datetime
2000-01-01 09:31:00-05:00 1000 999.874540 2000-01-01 09:30:00-05:00 2000-01-01 09:31:00-05:00
2000-01-01 09:32:00-05:00 1000 1000.325254 2000-01-01 09:31:00-05:00 2000-01-01 09:32:00-05:00
2000-01-01 09:33:00-05:00 1000 1000.557248 2000-01-01 09:32:00-05:00 2000-01-01 09:33:00-05:00
...
2000-01-01 09:33:00-05:00 1000 1000.557248 2000-01-01 09:32:00-05:00 2000-01-01 09:33:00-05:00
2000-01-01 09:34:00-05:00 1000 1000.655907 2000-01-01 09:33:00-05:00 2000-01-01 09:34:00-05:00
2000-01-01 09:35:00-05:00 1000 1000.311925 2000-01-01 09:34:00-05:00 2000-01-01 09:35:00-05:00"""
        # pylint: enable=line-too-long
        _check_get_data(self, initial_replayed_delay, func, expected_df_as_str)

    def test_get_data_for_interval2(self) -> None:
        """
        - Current time is 9:45
        - Ask data in [9:35, 9:40]
        - The returned data is [9:30, 9:40].
        """
        initial_replayed_delay = 15
        start_ts = pd.Timestamp("2000-01-01 09:35:00-05:00")
        end_ts = pd.Timestamp("2000-01-01 09:40:00-05:00")
        # Filter on the bar start (not end) this time.
        ts_col_name = "start_datetime"
        asset_ids = None
        normalize_data = True
        func = lambda market_data: market_data.get_data_for_interval(
            start_ts,
            end_ts,
            ts_col_name,
            asset_ids,
            normalize_data=normalize_data,
        )
        # pylint: disable=line-too-long
        expected_df_as_str = r"""
# df=
df.index in [2000-01-01 09:36:00-05:00, 2000-01-01 09:40:00-05:00]
df.columns=asset_id,last_price,start_datetime,timestamp_db
df.shape=(5, 4)
asset_id last_price start_datetime timestamp_db
end_datetime
2000-01-01 09:36:00-05:00 1000 999.967920 2000-01-01 09:35:00-05:00 2000-01-01 09:36:00-05:00
2000-01-01 09:37:00-05:00 1000 999.526004 2000-01-01 09:36:00-05:00 2000-01-01 09:37:00-05:00
2000-01-01 09:38:00-05:00 1000 999.892180 2000-01-01 09:37:00-05:00 2000-01-01 09:38:00-05:00
...
2000-01-01 09:38:00-05:00 1000 999.892180 2000-01-01 09:37:00-05:00 2000-01-01 09:38:00-05:00
2000-01-01 09:39:00-05:00 1000 999.993295 2000-01-01 09:38:00-05:00 2000-01-01 09:39:00-05:00
2000-01-01 09:40:00-05:00 1000 1000.201367 2000-01-01 09:39:00-05:00 2000-01-01 09:40:00-05:00"""
        # pylint: enable=line-too-long
        _check_get_data(self, initial_replayed_delay, func, expected_df_as_str)

    def test_get_data_at_timestamp1(self) -> None:
        """
        - Current time is 9:45
        - Ask data for 9:35
        - The returned data is for 9:35
        """
        initial_replayed_delay = 15
        ts = pd.Timestamp("2000-01-01 09:35:00-05:00")
        ts_col_name = "start_datetime"
        asset_ids = None
        normalize_data = True
        func = lambda market_data: market_data.get_data_at_timestamp(
            ts, ts_col_name, asset_ids, normalize_data=normalize_data
        )
        # pylint: disable=line-too-long
        expected_df_as_str = r"""
# df=
df.index in [2000-01-01 09:36:00-05:00, 2000-01-01 09:36:00-05:00]
df.columns=asset_id,last_price,start_datetime,timestamp_db
df.shape=(1, 4)
asset_id last_price start_datetime timestamp_db
end_datetime
2000-01-01 09:36:00-05:00 1000 999.96792 2000-01-01 09:35:00-05:00 2000-01-01 09:36:00-05:00"""
        # pylint: enable=line-too-long
        _check_get_data(self, initial_replayed_delay, func, expected_df_as_str)

    def test_get_data_at_timestamp2(self) -> None:
        """
        - Current time is 9:45
        - Ask data for 9:50
        - The return data is empty
        """
        initial_replayed_delay = 15
        # 9:50 is in the future relative to the replayed clock (9:45).
        ts = pd.Timestamp("2000-01-01 09:50:00-05:00")
        ts_col_name = "start_datetime"
        asset_ids = None
        normalize_data = True
        func = lambda market_data: market_data.get_data_at_timestamp(
            ts, ts_col_name, asset_ids, normalize_data=normalize_data
        )
        # pylint: disable=line-too-long
        expected_df_as_str = r"""
# df=
df.shape=(0, 4)
Empty DataFrame
Columns: [asset_id, last_price, start_datetime, timestamp_db]
Index: []"""
        # pylint: enable=line-too-long
        _check_get_data(self, initial_replayed_delay, func, expected_df_as_str)
# #############################################################################
class TestReplayedMarketData3(hunitest.TestCase):
    """
    Test `ReplayedMarketData.is_last_bar_available()` using simulated time.
    """

    def test_get_last_end_time1(self) -> None:
        """
        Check `get_last_end_time()` with the replayed clock 5 minutes in.
        """
        with hasynci.solipsism_context() as event_loop:
            # Build object.
            start_datetime = pd.Timestamp("2000-01-01 09:30:00-05:00")
            end_datetime = pd.Timestamp("2000-01-01 10:30:00-05:00")
            asset_ids = [1000]
            initial_replayed_delay = 5
            delay_in_secs = 0
            (market_data, _,) = mdmadaex.get_ReplayedTimeMarketData_example2(
                event_loop,
                start_datetime,
                end_datetime,
                initial_replayed_delay,
                asset_ids,
                delay_in_secs=delay_in_secs,
            )
            # Call method.
            last_end_time = market_data.get_last_end_time()
            # Check.
            _LOG.info("-> last_end_time=%s", last_end_time)
            self.assertEqual(last_end_time, pd.Timestamp("2000-01-01 09:35:00-05:00"))

    # #########################################################################

    def test_is_last_bar_available1(self) -> None:
        """
        Wait for the market to open.
        """
        # Start 2 minutes before the open; the helper must poll until data
        # becomes available.
        initial_replayed_delay = -2
        start_time, end_time, num_iter = self._run(initial_replayed_delay)
        # Check.
        expected_start_time = pd.Timestamp("2000-01-01 09:28:00-05:00")
        self.assertEqual(start_time, expected_start_time)
        #
        expected_end_time = pd.Timestamp("2000-01-01 09:31:00-05:00")
        self.assertEqual(end_time, expected_end_time)
        #
        expected_num_iter = 6
        self.assertEqual(num_iter, expected_num_iter)

    def test_is_last_bar_available2(self) -> None:
        """
        The market is already opened.
        """
        initial_replayed_delay = 5
        start_time, end_time, num_iter = self._run(initial_replayed_delay)
        # Check.
        expected_start_time = pd.Timestamp("2000-01-01 09:35:00-05:00")
        self.assertEqual(start_time, expected_start_time)
        #
        expected_end_time = pd.Timestamp("2000-01-01 09:35:00-05:00")
        self.assertEqual(end_time, expected_end_time)
        #
        # Data is available immediately, so no polling iterations occur.
        expected_num_iter = 0
        self.assertEqual(num_iter, expected_num_iter)

    def test_is_last_bar_available3(self) -> None:
        """
        The market is closed, so we expect a timeout.
        """
        initial_replayed_delay = 63
        with self.assertRaises(TimeoutError):
            self._run(initial_replayed_delay)

    def _run(
        self, initial_replayed_delay: int
    ) -> Tuple[pd.Timestamp, pd.Timestamp, int]:
        """
        - Build a ReplayedMarketData
        - Run `is_last_bar_available()`

        :param initial_replayed_delay: offset of the replayed clock, in minutes
        :return: (start wall-clock time, end wall-clock time, number of
            polling iterations) from `wait_for_latest_data()`
        """
        with hasynci.solipsism_context() as event_loop:
            # Build a ReplayedMarketData.
            start_datetime = pd.Timestamp("2000-01-01 09:30:00-05:00")
            end_datetime = pd.Timestamp("2000-01-01 10:30:00-05:00")
            asset_ids = [1000]
            delay_in_secs = 0
            # Poll every 30 simulated seconds, give up after 5 minutes.
            sleep_in_secs = 30
            time_out_in_secs = 60 * 5
            (market_data, _,) = mdmadaex.get_ReplayedTimeMarketData_example2(
                event_loop,
                start_datetime,
                end_datetime,
                initial_replayed_delay,
                asset_ids,
                delay_in_secs=delay_in_secs,
                sleep_in_secs=sleep_in_secs,
                time_out_in_secs=time_out_in_secs,
            )
            # Run the method.
            start_time, end_time, num_iter = hasynci.run(
                market_data.wait_for_latest_data(),
                event_loop=event_loop,
            )
            return start_time, end_time, num_iter
class TestReplayedMarketData4(hunitest.TestCase):
    """
    Test `ReplayedMarketData.is_last_bar_available()` using simulated time.
    """

    def test_is_last_bar_available1(self) -> None:
        """
        Wait for the market to open.
        """
        start_time, end_time, num_iter = self._run()
        # Check.
        expected_start_time = pd.Timestamp(
            "2000-01-03 09:31:00-05:00", tz="America/New_York"
        )
        self.assertEqual(start_time, expected_start_time)
        #
        expected_end_time = pd.Timestamp(
            "2000-01-03 09:31:30-05:00", tz="America/New_York"
        )
        self.assertEqual(end_time, expected_end_time)
        #
        # Exactly one polling iteration is needed before data appears.
        expected_num_iter = 1
        self.assertEqual(num_iter, expected_num_iter)

    def _run(self) -> Tuple[pd.Timestamp, pd.Timestamp, int]:
        """
        - Build a ReplayedMarketData
        - Run `is_last_bar_available()`

        :return: (start wall-clock time, end wall-clock time, number of
            polling iterations) from `wait_for_latest_data()`
        """
        with hasynci.solipsism_context() as event_loop:
            # Build a ReplayedMarketData with multiple assets (example4).
            (market_data, _,) = mdmadaex.get_ReplayedTimeMarketData_example4(
                event_loop,
                initial_replayed_delay=1,
                start_datetime=pd.Timestamp(
                    "2000-01-03 09:31:00-05:00", tz="America/New_York"
                ),
                end_datetime=pd.Timestamp(
                    "2000-01-03 09:31:00-05:00", tz="America/New_York"
                ),
                asset_ids=[101, 202, 303],
            )
            # Run the method.
            start_time, end_time, num_iter = hasynci.run(
                market_data.wait_for_latest_data(),
                event_loop=event_loop,
            )
            return start_time, end_time, num_iter
| 41.868526
| 114
| 0.59089
| 4,735
| 31,527
| 3.714044
| 0.056177
| 0.076083
| 0.07506
| 0.113158
| 0.883771
| 0.86802
| 0.848573
| 0.844194
| 0.824008
| 0.810759
| 0
| 0.226585
| 0.297269
| 31,527
| 752
| 115
| 41.924202
| 0.567186
| 0.104038
| 0
| 0.635674
| 0
| 0.129032
| 0.42619
| 0.028229
| 0
| 0
| 0
| 0.00266
| 0.026565
| 1
| 0.045541
| false
| 0
| 0.01518
| 0
| 0.074004
| 0.003795
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
42f4718a6a3a59e8c1149cda9f0208f8c5e802fd
| 2,877
|
py
|
Python
|
axelrod/tests/test_averagecopier.py
|
DumisaniZA/Axelrod
|
e59fc40ebb705afe05cea6f30e282d1e9c621259
|
[
"MIT"
] | 33
|
2015-02-20T11:36:48.000Z
|
2022-02-16T17:02:06.000Z
|
axelrod/tests/test_averagecopier.py
|
DumisaniZA/Axelrod
|
e59fc40ebb705afe05cea6f30e282d1e9c621259
|
[
"MIT"
] | 108
|
2015-02-18T14:15:44.000Z
|
2020-05-08T10:39:58.000Z
|
axelrod/tests/test_averagecopier.py
|
DumisaniZA/Axelrod
|
e59fc40ebb705afe05cea6f30e282d1e9c621259
|
[
"MIT"
] | 41
|
2015-02-18T13:40:04.000Z
|
2021-05-31T06:08:10.000Z
|
"""Test for the average_copier strategy."""
import random
import axelrod
from test_player import TestPlayer
class TestAverageCopier(TestPlayer):
    """Tests for the AverageCopier strategy."""

    name = "Average Copier"
    player = axelrod.AverageCopier
    stochastic = True

    def test_strategy(self):
        """Test that the first strategy is picked randomly."""
        random.seed(1)
        player = axelrod.AverageCopier()
        opponent = axelrod.Player()
        # With seed 1 the stochastic choices come out in this exact order.
        expected_moves = ['C', 'D', 'D', 'C', 'C', 'C', 'D', 'D', 'C', 'C']
        for move in expected_moves:
            self.assertEqual(player.strategy(opponent), move)

    def test_when_oppenent_all_Cs(self):
        """
        Tests that if opponent has played all C then player chooses C
        """
        random.seed(5)
        player = axelrod.AverageCopier()
        opponent = axelrod.Player()
        opponent.history = ['C'] * 4
        for _ in range(3):
            self.assertEqual(player.strategy(opponent), 'C')

    def test_when_opponent_all_Ds(self):
        """
        Tests that if opponent has played all D then player chooses D
        """
        random.seed(5)
        player = axelrod.AverageCopier()
        opponent = axelrod.Player()
        opponent.history = ['D'] * 4
        for _ in range(3):
            self.assertEqual(player.strategy(opponent), 'D')
class TestNiceAverageCopier(TestPlayer):
    """Tests for the NiceAverageCopier strategy."""

    name = "Nice Average Copier"
    player = axelrod.NiceAverageCopier
    stochastic = True

    def test_strategy(self):
        """Test that the first strategy is cooperation."""
        player = axelrod.NiceAverageCopier()
        opponent = axelrod.Player()
        self.assertEqual(player.strategy(opponent), 'C')

    def test_when_oppenent_all_Cs(self):
        """
        Tests that if opponent has played all C then player chooses C
        """
        random.seed(5)
        player = axelrod.NiceAverageCopier()
        opponent = axelrod.Player()
        opponent.history = ['C'] * 4
        for _ in range(3):
            self.assertEqual(player.strategy(opponent), 'C')

    def test_when_opponent_all_Ds(self):
        """
        Tests that if opponent has played all D then player chooses D
        """
        random.seed(5)
        player = axelrod.NiceAverageCopier()
        opponent = axelrod.Player()
        opponent.history = ['D'] * 4
        for _ in range(3):
            self.assertEqual(player.strategy(opponent), 'D')
| 31.615385
| 69
| 0.593674
| 350
| 2,877
| 4.822857
| 0.142857
| 0.204384
| 0.231635
| 0.34064
| 0.831754
| 0.831754
| 0.803318
| 0.803318
| 0.803318
| 0.785545
| 0
| 0.031619
| 0.263469
| 2,877
| 91
| 70
| 31.615385
| 0.764983
| 0.131734
| 0
| 0.836066
| 0
| 0
| 0.030113
| 0
| 0
| 0
| 0
| 0
| 0.377049
| 1
| 0.098361
| false
| 0
| 0.04918
| 0
| 0.278689
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
42f83643dd846e8d8625b1943918528e7274693e
| 97
|
py
|
Python
|
tests/test_version.py
|
DjaPy/fastapi-asyncapi
|
8a99f29bd1bf9041859c662d9b5dba612e0d60ec
|
[
"MIT"
] | 10
|
2021-02-20T00:21:24.000Z
|
2022-03-18T14:56:19.000Z
|
tests/test_version.py
|
DjaPy/fastapi-asyncapi
|
8a99f29bd1bf9041859c662d9b5dba612e0d60ec
|
[
"MIT"
] | 2
|
2022-02-04T12:54:33.000Z
|
2022-02-10T06:08:59.000Z
|
tests/test_version.py
|
DjaPy/fastapi-asyncapi
|
8a99f29bd1bf9041859c662d9b5dba612e0d60ec
|
[
"MIT"
] | 1
|
2022-02-06T12:04:21.000Z
|
2022-02-06T12:04:21.000Z
|
import fastapi_asyncapi
def test_version():
    """Check that the package exposes the expected version string."""
    expected = "0.1.0"
    assert fastapi_asyncapi.__version__ == expected
| 16.166667
| 50
| 0.752577
| 13
| 97
| 5.076923
| 0.692308
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.036145
| 0.14433
| 97
| 5
| 51
| 19.4
| 0.759036
| 0
| 0
| 0
| 0
| 0
| 0.051546
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6e4a5de4cb4fa30529430cb38122dca73bb8673e
| 123
|
py
|
Python
|
deepchem/models/tensorgraph/__init__.py
|
n3011/deepchem
|
c316d998c462ce01032f0dae883856b400ea4765
|
[
"MIT"
] | 14
|
2017-10-12T23:47:23.000Z
|
2022-02-09T13:13:51.000Z
|
deepchem/models/tensorgraph/__init__.py
|
n3011/deepchem
|
c316d998c462ce01032f0dae883856b400ea4765
|
[
"MIT"
] | 2
|
2017-08-02T20:35:13.000Z
|
2017-08-10T21:17:31.000Z
|
deepchem/models/tensorgraph/__init__.py
|
n3011/deepchem
|
c316d998c462ce01032f0dae883856b400ea4765
|
[
"MIT"
] | 5
|
2017-03-19T01:48:13.000Z
|
2019-02-22T01:12:03.000Z
|
from deepchem.models.tensorgraph.tensor_graph import TensorGraph, TFWrapper
from deepchem.models.tensorgraph import models
| 123
| 123
| 0.878049
| 15
| 123
| 7.133333
| 0.533333
| 0.224299
| 0.336449
| 0.542056
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073171
| 123
| 1
| 123
| 123
| 0.938596
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
285d7f777578a794d7c4b0291a04c15070a301fc
| 7,158
|
py
|
Python
|
tests/unit/test_visit_flowers.py
|
man-group/hiveminder
|
0ec31d7abce11dde7cce99fe262781d254df2eb7
|
[
"BSD-2-Clause"
] | 5
|
2022-01-27T16:38:53.000Z
|
2022-02-09T22:43:15.000Z
|
tests/unit/test_visit_flowers.py
|
man-group/hiveminder
|
0ec31d7abce11dde7cce99fe262781d254df2eb7
|
[
"BSD-2-Clause"
] | null | null | null |
tests/unit/test_visit_flowers.py
|
man-group/hiveminder
|
0ec31d7abce11dde7cce99fe262781d254df2eb7
|
[
"BSD-2-Clause"
] | 2
|
2020-11-04T19:51:15.000Z
|
2022-02-09T22:50:54.000Z
|
from __future__ import absolute_import
from hiveminder.game import GameState, make_flowers
from hiveminder.bee import Bee
from hiveminder.seed import Seed
from hiveminder.flower import Flower
from hiveminder.game_params import DEFAULT_GAME_PARAMETERS
from mock import sentinel, patch, Mock, ANY
import pytest
@patch.dict("hiveminder.game_state.rngs", {sentinel.game_id: sentinel.rng})
def test_visit_flowers_does_not_feed_bee_if_not_on_flower():
    """A bee adjacent to (but not on) a flower is left untouched by visit_flowers()."""
    flower = Flower(5, 5, DEFAULT_GAME_PARAMETERS, 1, expires=DEFAULT_GAME_PARAMETERS.flower_lifespan)
    game = GameState(
        game_params=DEFAULT_GAME_PARAMETERS,
        game_id=sentinel.game_id,
        boards=1,
        board_width=10,
        board_height=10,
        hives=(sentinel.hives,),
        flowers=((flower,),),
        game_length=sentinel.game_length,
    )
    board = game.boards[0]
    board.inflight[sentinel.bee_1] = Bee(5, 4, 0, 10, DEFAULT_GAME_PARAMETERS, 1)

    game.visit_flowers()

    # Bee sits at (5, 4), flower at (5, 5): energy and nectar stay as created.
    assert board.inflight == {sentinel.bee_1: Bee(5, 4, 0, 10, DEFAULT_GAME_PARAMETERS, 1)}
def test_visit_flowers_single_bee_single_flower():
    """A bee standing on a flower gains the per-nectar energy boost and one nectar."""
    flower = Flower(5, 5, DEFAULT_GAME_PARAMETERS, 1, expires=DEFAULT_GAME_PARAMETERS.flower_lifespan)
    game = GameState(
        game_params=DEFAULT_GAME_PARAMETERS,
        game_id=sentinel.game_id,
        boards=1,
        board_width=10,
        board_height=10,
        hives=(sentinel.hives,),
        flowers=((flower,),),
        game_length=sentinel.game_length,
    )
    board = game.boards[0]
    board.inflight[sentinel.bee_1] = Bee(5, 5, 0, 10, DEFAULT_GAME_PARAMETERS, 1)

    game.visit_flowers()

    fed_energy = 10 + DEFAULT_GAME_PARAMETERS.bee_energy_boost_per_nectar
    # Nectar went 1 -> 2 after the single visit.
    assert board.inflight == {sentinel.bee_1: Bee(5, 5, 0, fed_energy, DEFAULT_GAME_PARAMETERS, 2)}
def test_visit_flowers_two_bees_two_flowers():
    """Each of two bees feeds from the flower on its own tile in one visit pass."""
    flowers = (Flower(5, 5, DEFAULT_GAME_PARAMETERS, 1, expires=DEFAULT_GAME_PARAMETERS.flower_lifespan),
               Flower(5, 4, DEFAULT_GAME_PARAMETERS, 1, expires=DEFAULT_GAME_PARAMETERS.flower_lifespan))
    game = GameState(
        game_params=DEFAULT_GAME_PARAMETERS,
        game_id=sentinel.game_id,
        boards=1,
        board_width=10,
        board_height=10,
        hives=(sentinel.hives,),
        flowers=(flowers,),
        game_length=sentinel.game_length,
    )
    board = game.boards[0]
    board.inflight[sentinel.bee_1] = Bee(5, 5, 0, 10, DEFAULT_GAME_PARAMETERS, 0)
    board.inflight[sentinel.bee_2] = Bee(5, 4, 0, 10, DEFAULT_GAME_PARAMETERS, 0)

    game.visit_flowers()

    fed_energy = 10 + DEFAULT_GAME_PARAMETERS.bee_energy_boost_per_nectar
    assert board.inflight == {
        sentinel.bee_1: Bee(5, 5, 0, fed_energy, DEFAULT_GAME_PARAMETERS, 1),
        sentinel.bee_2: Bee(5, 4, 0, fed_energy, DEFAULT_GAME_PARAMETERS, 1),
    }
def test_visit_flowers_two_bees_two_flowers_more_potent_and_live_longer():
    """Potent flowers give potency-scaled energy/nectar, and visits extend flower life."""
    flower1 = Flower(5, 5, DEFAULT_GAME_PARAMETERS, 3, expires=DEFAULT_GAME_PARAMETERS.flower_lifespan)
    flower2 = Flower(5, 4, DEFAULT_GAME_PARAMETERS, 2, expires=DEFAULT_GAME_PARAMETERS.flower_lifespan)
    game = GameState(
        game_params=DEFAULT_GAME_PARAMETERS,
        game_id=sentinel.game_id,
        boards=1,
        board_width=10,
        board_height=10,
        hives=(sentinel.hives,),
        flowers=((flower1, flower2),),
        game_length=sentinel.game_length,
    )
    board = game.boards[0]
    board.inflight[sentinel.bee_1] = Bee(5, 5, 0, 10, DEFAULT_GAME_PARAMETERS, 3)
    board.inflight[sentinel.bee_2] = Bee(5, 4, 0, 10, DEFAULT_GAME_PARAMETERS, 2)

    game.visit_flowers()

    # Both flowers had their lifespans extended by the visit impact.
    extended = DEFAULT_GAME_PARAMETERS.flower_lifespan + DEFAULT_GAME_PARAMETERS.flower_lifespan_visit_impact
    assert flower1.expires == extended
    assert flower2.expires == extended
    boost = DEFAULT_GAME_PARAMETERS.bee_energy_boost_per_nectar
    # Energy gain and nectar gain both scale with the flower's potency (3 and 2).
    assert board.inflight == {
        sentinel.bee_1: Bee(5, 5, 0, 10 + 3 * boost, DEFAULT_GAME_PARAMETERS, 5),
        sentinel.bee_2: Bee(5, 4, 0, 10 + 2 * boost, DEFAULT_GAME_PARAMETERS, 4),
    }
@patch.dict("hiveminder.game_state.rngs", {sentinel.game_id: sentinel.rng})
def test_seed_does_not_visit_flower():
    """A Seed sitting on a flower's tile does not trigger a visit (only bees feed)."""
    # NOTE(review): unlike the sibling tests, this Flower is given
    # DEFAULT_GAME_PARAMETERS.flower_lifespan_visit_impact as its fifth
    # *positional* argument (the others pass expires=flower_lifespan) —
    # confirm this is intentional and not a copy-paste slip.
    game = GameState(game_params=DEFAULT_GAME_PARAMETERS,
                     game_id=sentinel.game_id,
                     boards=1,
                     board_width=10,
                     board_height=10,
                     hives=(sentinel.hives,),
                     flowers=((Flower(5, 5, DEFAULT_GAME_PARAMETERS, 1, DEFAULT_GAME_PARAMETERS.flower_lifespan_visit_impact),),),
                     game_length=sentinel.game_length)
    # Place a Seed (not a Bee) directly on the flower's tile at (5, 5).
    game.boards[0].inflight[sentinel.seed_1] = Seed(5, 5, 0)
    game.visit_flowers()
    # The seed must be exactly as created: no energy/nectar mechanics apply.
    assert game.boards[0].inflight == {sentinel.seed_1: Seed(5, 5, 0)}
# Visit counts at which a flower is expected to emit a seed: the initial
# threshold, then one every `flower_seed_visit_subsequent_threshold` visits
# after that (eight thresholds in total).
SEED_GENERATION_VISITS = [DEFAULT_GAME_PARAMETERS.flower_seed_visit_initial_threshold +
                          (DEFAULT_GAME_PARAMETERS.flower_seed_visit_subsequent_threshold * x) for x in range(0, 8)]
@patch.dict("hiveminder.game_state.rngs", {sentinel.game_id: Mock(name="rng", **{"random.return_value": 1})})
@patch("hiveminder.board.uuid4", return_value=sentinel.uuid)
@pytest.mark.parametrize('visits', SEED_GENERATION_VISITS)
def test_visiting_flower_creates_seed_every_ten_visits(_, visits):
    """Reaching a seed-generation threshold spawns a Seed on the flower's tile.

    The game's rng is a Mock whose random() returns 1, and board.uuid4 is
    patched to sentinel.uuid so the new seed's inflight key is predictable.
    ``_`` receives the uuid4 mock injected by the @patch decorator.
    """
    game = GameState(game_params=DEFAULT_GAME_PARAMETERS,
                     game_id=sentinel.game_id,
                     boards=1,
                     board_width=10,
                     board_height=10,
                     hives=(sentinel.hives,),
                     # The flower starts one visit short of the threshold; the
                     # bee's visit below tips it over.
                     flowers=((Flower(5, 5, DEFAULT_GAME_PARAMETERS, 3, visits - 1, DEFAULT_GAME_PARAMETERS.flower_lifespan_visit_impact),),),
                     game_length=sentinel.game_length)
    game.boards[0].inflight[sentinel.bee_1] = Bee(5, 5, 0, 10, DEFAULT_GAME_PARAMETERS)
    game.visit_flowers()
    # NOTE(review): 85 presumably equals 10 + 3 * bee_energy_boost_per_nectar
    # under DEFAULT_GAME_PARAMETERS — confirm against game_params. Also note
    # the seed's key is the *string* 'sentinel.uuid' (str() of the patched
    # uuid4 return value), not the sentinel object itself.
    assert game.boards[0].inflight == {sentinel.bee_1: Bee(5, 5, 0, 85, DEFAULT_GAME_PARAMETERS, 3),
                                       'sentinel.uuid': Seed(5, 5, ANY)}
@pytest.mark.parametrize('visits', [n for n in range(0, 45) if n not in SEED_GENERATION_VISITS])
def test_visiting_flower_does_not_create_seeds_when_not_seed_threshold(visits):
    """Visit counts *between* seed-generation thresholds must not spawn a seed."""
    game = GameState(game_params=DEFAULT_GAME_PARAMETERS,
                     game_id=sentinel.game_id,
                     boards=1,
                     board_width=10,
                     board_height=10,
                     hives=(sentinel.hives,),
                     # One visit short of `visits`; the bee's visit makes it `visits`.
                     flowers=((Flower(5, 5, DEFAULT_GAME_PARAMETERS, 3, visits - 1, DEFAULT_GAME_PARAMETERS.flower_lifespan_visit_impact),),),
                     game_length=sentinel.game_length)
    game.boards[0].inflight[sentinel.bee_1] = Bee(5, 5, 0, 10, DEFAULT_GAME_PARAMETERS)
    game.visit_flowers()
    # Only the fed bee remains inflight — no Seed entry was added.
    # NOTE(review): 85 presumably equals 10 + 3 * bee_energy_boost_per_nectar
    # under DEFAULT_GAME_PARAMETERS — confirm.
    assert game.boards[0].inflight == {sentinel.bee_1: Bee(5, 5, 0, 85, DEFAULT_GAME_PARAMETERS, 3)}
| 51.128571
| 161
| 0.649623
| 904
| 7,158
| 4.811947
| 0.102876
| 0.128966
| 0.246207
| 0.069885
| 0.832184
| 0.832184
| 0.787816
| 0.741609
| 0.716092
| 0.705517
| 0
| 0.039941
| 0.247974
| 7,158
| 139
| 162
| 51.496403
| 0.768159
| 0
| 0
| 0.587156
| 0
| 0
| 0.020536
| 0.01397
| 0
| 0
| 0
| 0
| 0.073395
| 1
| 0.06422
| false
| 0
| 0.073395
| 0
| 0.137615
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
955fb0cfe9ee87a23163bda978fc46cf5781c5ae
| 529
|
py
|
Python
|
flaskr/populate_db.py
|
johnsliao/D3.js-flask
|
6785ecdd48e5d12eefd539681c9fe54b370723e3
|
[
"MIT"
] | 21
|
2016-05-14T15:53:39.000Z
|
2022-02-16T07:21:16.000Z
|
flaskr/populate_db.py
|
johnsliao/D3.js-flask
|
6785ecdd48e5d12eefd539681c9fe54b370723e3
|
[
"MIT"
] | 1
|
2017-12-28T17:02:44.000Z
|
2017-12-28T17:02:44.000Z
|
flaskr/populate_db.py
|
johnsliao/D3.js-flask
|
6785ecdd48e5d12eefd539681c9fe54b370723e3
|
[
"MIT"
] | 6
|
2017-10-23T20:54:12.000Z
|
2020-12-26T14:48:31.000Z
|
import sqlite3
from contextlib import closing

# Seed the flaskr demo database with sample chart points: (week, data1, data2).
# The `entries` table must already exist (created by the app's schema).
SAMPLE_ROWS = [
    (1, 5, 2),
    (2, 2, 4),
    (3, 9, 6),
    (4, 12, 8),
    (5, 20, 12),
    (6, 17, 18),
]

# closing() guarantees the connection is released even if an insert fails;
# the original left it open on error. executemany() replaces six copy-pasted
# execute() calls and uses a parameterized statement instead of values baked
# into the SQL text.
with closing(sqlite3.connect('flaskr.db')) as conn:
    conn.executemany(
        'insert into entries (week, data1, data2) values (?, ?, ?)',
        SAMPLE_ROWS)
    conn.commit()
| 37.785714
| 72
| 0.68431
| 86
| 529
| 4.209302
| 0.348837
| 0.132597
| 0.232044
| 0.298343
| 0.745856
| 0.745856
| 0.745856
| 0.745856
| 0.745856
| 0
| 0
| 0.080963
| 0.136106
| 529
| 14
| 73
| 37.785714
| 0.71116
| 0
| 0
| 0
| 0
| 0
| 0.671698
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.090909
| 0
| 0.090909
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
95614786765fb1eee5461063b13bad26899e9bd9
| 22,348
|
py
|
Python
|
tests/compiler/compose/compose_push/test_push_compose_agg.py
|
CCD-HRI/congregation
|
a552856b03a64a4295792184107c4e529ca3f4ae
|
[
"MIT"
] | 3
|
2020-10-05T16:30:15.000Z
|
2021-01-22T13:38:02.000Z
|
tests/compiler/compose/compose_push/test_push_compose_agg.py
|
CCD-HRI/congregation
|
a552856b03a64a4295792184107c4e529ca3f4ae
|
[
"MIT"
] | null | null | null |
tests/compiler/compose/compose_push/test_push_compose_agg.py
|
CCD-HRI/congregation
|
a552856b03a64a4295792184107c4e529ca3f4ae
|
[
"MIT"
] | 1
|
2021-02-19T12:40:57.000Z
|
2021-02-19T12:40:57.000Z
|
from congregation.lang import *
from congregation.dag import Dag
from congregation.dag.nodes.internal import *
from congregation.comp import PushDown, PushUp
from tests.utils import create_cols, compare_to_expected
import pytest
@pytest.mark.parametrize("party_data, expected", [
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [
Create,
Create,
AggregateSumSquaresAndCount,
AggregateSumSquaresAndCount,
Concat,
AggregateStdDev,
AggregateStdDevLocalSqrt,
Multiply,
Collect
],
"requires_mpc": [False, False, False, False, True, True, False, False, False],
"ownership_data":[
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}, {1}, {1}],
"trust_with_sets": [{1}, {1}, {1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}, {2}, {2}],
"trust_with_sets": [{2}, {2}, {2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set(), set(), set()],
"trust_with_sets": [set(), set(), set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}, {1, 2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
}
],
{
"node_order": [
Create,
Create,
AggregateSumSquaresAndCount,
AggregateSumSquaresAndCount,
Concat,
AggregateStdDev,
AggregateStdDevLocalSqrt,
Multiply,
Collect
],
"requires_mpc": [False, False, False, False, True, True, False, False, False],
"ownership_data":[
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}, {1}, {1}],
"trust_with_sets": [{1}, {1}, {1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}, {2}, {2}],
"trust_with_sets": [{2}, {2}, {2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set(), set(), set()],
"trust_with_sets": [set(), set(), set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}, {1, 2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [
Create,
Create,
AggregateSumSquaresAndCount,
AggregateSumSquaresAndCount,
Concat,
AggregateStdDev,
AggregateStdDevLocalSqrt,
Multiply,
Collect
],
"requires_mpc": [False, False, False, False, True, True, False, False, False],
"ownership_data":[
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}, {1}, {1}],
"trust_with_sets": [{1, 2}, {1}, {1}, {1, 2}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}, {2}, {2}],
"trust_with_sets": [{2}, {2}, {2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set(), set(), set()],
"trust_with_sets": [{2}, set(), set(), {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}, {1, 2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"col_names": ["c", "d"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
}
],
{
"node_order": [
Create,
Create,
Concat,
AggregateStdDev,
AggregateStdDevLocalSqrt,
Multiply,
Collect
],
"requires_mpc": [True, True, True, True, False, False, False],
"ownership_data":[
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}, {1, 2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
}
]
}
)
])
def test_agg_std_dev(party_data, expected):
    """Push-down/push-up rewrites of a std_dev aggregation DAG match *expected*."""
    first, second = party_data
    key_col = first["col_names"][0]
    val_col = first["col_names"][1]
    rel_one = create("in1", create_cols(first), first["stored_with"])
    rel_two = create("in2", create_cols(second), second["stored_with"])
    cc = concat([rel_one, rel_two], "concat", first["col_names"])
    std_dev = aggregate(cc, "std_dev", [key_col], val_col, "std_dev")
    mult = multiply(std_dev, "mult", key_col, [val_col, 7])
    collect(mult, {1, 2})
    dag = Dag({rel_one, rel_two})
    # Both rewrite passes mutate the DAG in place; order matters.
    PushDown().rewrite(dag)
    PushUp().rewrite(dag)
    compare_to_expected(dag, expected)
@pytest.mark.parametrize("party_data, expected", [
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [
Create,
Create,
AggregateSumSquaresAndCount,
AggregateSumSquaresAndCount,
Concat,
AggregateVariance,
AggregateVarianceLocalDiff,
Multiply,
Collect
],
"requires_mpc": [False, False, False, False, True, True, False, False, False],
"ownership_data":[
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}, {1}, {1}],
"trust_with_sets": [{1}, {1}, {1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}, {2}, {2}],
"trust_with_sets": [{2}, {2}, {2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set(), set(), set()],
"trust_with_sets": [set(), set(), set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}, {1, 2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
}
],
{
"node_order": [
Create,
Create,
AggregateSumSquaresAndCount,
AggregateSumSquaresAndCount,
Concat,
AggregateVariance,
AggregateVarianceLocalDiff,
Multiply,
Collect
],
"requires_mpc": [False, False, False, False, True, True, False, False, False],
"ownership_data":[
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}, {1}, {1}],
"trust_with_sets": [{1}, {1}, {1}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}, {2}, {2}],
"trust_with_sets": [{2}, {2}, {2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set(), set(), set()],
"trust_with_sets": [set(), set(), set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}, {1, 2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1},
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1}]
},
{
"col_names": ["c", "d"],
"stored_with": {2},
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
}
],
{
"node_order": [
Create,
Create,
AggregateSumSquaresAndCount,
AggregateSumSquaresAndCount,
Concat,
AggregateVariance,
AggregateVarianceLocalDiff,
Multiply,
Collect
],
"requires_mpc": [False, False, False, False, True, True, False, False, False],
"ownership_data":[
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}],
"trust_with_sets": [{1, 2}, {1}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}],
"trust_with_sets": [{2}, {2}]
},
{
"stored_with": [{1}],
"plaintext_sets": [{1}, {1}, {1}, {1}],
"trust_with_sets": [{1, 2}, {1}, {1}, {1, 2}]
},
{
"stored_with": [{2}],
"plaintext_sets": [{2}, {2}, {2}, {2}],
"trust_with_sets": [{2}, {2}, {2}, {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [set(), set(), set(), set()],
"trust_with_sets": [{2}, set(), set(), {2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}, {1, 2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
}
]
}
),
(
[
{
"col_names": ["a", "b"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"col_names": ["c", "d"],
"stored_with": {1, 2},
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
}
],
{
"node_order": [
Create,
Create,
Concat,
AggregateVariance,
AggregateVarianceLocalDiff,
Multiply,
Collect
],
"requires_mpc": [True, True, True, True, False, False, False],
"ownership_data":[
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1, 2}],
"plaintext_sets": [set(), set()],
"trust_with_sets": [set(), set()]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}, {1, 2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
},
{
"stored_with": [{1}, {2}],
"plaintext_sets": [{1, 2}, {1, 2}],
"trust_with_sets": [{1, 2}, {1, 2}]
}
]
}
)
])
def test_agg_variance(party_data, expected):
    """Push-down/push-up rewrites of a variance aggregation DAG match *expected*."""
    first, second = party_data
    key_col = first["col_names"][0]
    val_col = first["col_names"][1]
    rel_one = create("in1", create_cols(first), first["stored_with"])
    rel_two = create("in2", create_cols(second), second["stored_with"])
    cc = concat([rel_one, rel_two], "concat", first["col_names"])
    variance = aggregate(cc, "variance", [key_col], val_col, "variance")
    mult = multiply(variance, "mult", key_col, [val_col, 7])
    collect(mult, {1, 2})
    dag = Dag({rel_one, rel_two})
    # Both rewrite passes mutate the DAG in place; order matters.
    PushDown().rewrite(dag)
    PushUp().rewrite(dag)
    compare_to_expected(dag, expected)
| 36.220421
| 116
| 0.309513
| 1,727
| 22,348
| 3.75275
| 0.041112
| 0.053078
| 0.156457
| 0.041969
| 0.952631
| 0.952631
| 0.952631
| 0.939978
| 0.939978
| 0.939978
| 0
| 0.054743
| 0.522642
| 22,348
| 616
| 117
| 36.279221
| 0.552775
| 0
| 0
| 0.651163
| 0
| 0
| 0.170933
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003322
| false
| 0
| 0.009967
| 0
| 0.013289
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
957f629c9ce69bd746322a15fc799c745d495052
| 154
|
py
|
Python
|
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_1/_pkg0_1_1/_pkg0_1_1_1/_pkg0_1_1_1_1/__init__.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_1/_pkg0_1_1/_pkg0_1_1_1/_pkg0_1_1_1_1/__init__.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_1/_pkg0_1_1/_pkg0_1_1_1/_pkg0_1_1_1_1/__init__.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
from ._mod0_1_1_1_1_0 import *
from ._mod0_1_1_1_1_1 import *
from ._mod0_1_1_1_1_2 import *
from ._mod0_1_1_1_1_3 import *
from ._mod0_1_1_1_1_4 import *
| 30.8
| 30
| 0.811688
| 40
| 154
| 2.375
| 0.2
| 0.336842
| 0.347368
| 0.252632
| 0.884211
| 0.884211
| 0.757895
| 0
| 0
| 0
| 0
| 0.222222
| 0.123377
| 154
| 5
| 31
| 30.8
| 0.481481
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 12
|
95fef0908bea31cda36642ddae6f5df42f10b474
| 26,044
|
py
|
Python
|
watcherclient/tests/unit/v1/test_audit_shell.py
|
openstack/python-watcherclient
|
765701da39030df367f27c9c1b8b95b743fb1279
|
[
"Apache-2.0"
] | 17
|
2015-10-18T02:56:38.000Z
|
2019-03-12T11:18:50.000Z
|
watcherclient/tests/unit/v1/test_audit_shell.py
|
stackforge/python-watcherclient
|
765701da39030df367f27c9c1b8b95b743fb1279
|
[
"Apache-2.0"
] | null | null | null |
watcherclient/tests/unit/v1/test_audit_shell.py
|
stackforge/python-watcherclient
|
765701da39030df367f27c9c1b8b95b743fb1279
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright 2013 IBM Corp
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from unittest import mock
import six
from watcherclient import shell
from watcherclient.tests.unit.v1 import base
from watcherclient import v1 as resource
# Fixture payloads used to build resource.* objects in the tests below.
# Audit template referenced by the audits via 'audit_template_uuid'.
AUDIT_TEMPLATE_1 = {
    'uuid': 'f8e47706-efcf-49a4-a5c4-af604eb492f2',
    'name': 'at1',
    'description': 'Audit Template 1 description',
    'goal_uuid': 'fc087747-61be-4aad-8126-b701731ae836',
    'strategy_uuid': '2cf86250-d309-4b81-818e-1537f3dba6e5',
    'created_at': datetime.datetime.now().isoformat(),
    'updated_at': None,
    'deleted_at': None,
}
# Goal whose uuid matches AUDIT_TEMPLATE_1['goal_uuid'].
GOAL_1 = {
    'uuid': "fc087747-61be-4aad-8126-b701731ae836",
    'name': "SERVER_CONSOLIDATION",
    'display_name': 'Server Consolidation',
    'created_at': datetime.datetime.now().isoformat(),
    'updated_at': None,
    'deleted_at': None,
}
# Strategy whose uuid matches AUDIT_TEMPLATE_1['strategy_uuid'].
STRATEGY_1 = {
    'uuid': '2cf86250-d309-4b81-818e-1537f3dba6e5',
    'name': 'basic',
    'display_name': 'Basic consolidation',
    'goal_uuid': 'fc087747-61be-4aad-8126-b701731ae836',
    'created_at': datetime.datetime.now().isoformat(),
    'updated_at': None,
    'deleted_at': None,
}
class AuditShellTest(base.CommandTestCase):
AUDIT_1 = {
'uuid': '5869da81-4876-4687-a1ed-12cd64cf53d9',
'audit_type': 'ONESHOT',
'state': 'PENDING',
'audit_template_uuid': 'f8e47706-efcf-49a4-a5c4-af604eb492f2',
'audit_template_name': 'at1',
'goal_name': 'SERVER_CONSOLIDATION',
'strategy_name': 'basic',
'created_at': datetime.datetime.now().isoformat(),
'updated_at': None,
'deleted_at': None,
'parameters': None,
'interval': None,
'scope': '',
'auto_trigger': False,
'next_run_time': None,
'name': 'my_audit1',
'hostname': '',
}
AUDIT_2 = {
'uuid': 'a5199d0e-0702-4613-9234-5ae2af8dafea',
'audit_type': 'ONESHOT',
'state': 'PENDING',
'audit_template_uuid': 'f8e47706-efcf-49a4-a5c4-af604eb492f2',
'audit_template_name': 'at1',
'goal_name': 'fc087747-61be-4aad-8126-b701731ae836',
'strategy_name': 'auto',
'created_at': datetime.datetime.now().isoformat(),
'updated_at': None,
'deleted_at': None,
'parameters': None,
'interval': None,
'scope': '',
'auto_trigger': False,
'next_run_time': None,
'name': 'my_audit2',
'hostname': '',
}
AUDIT_3 = {
'uuid': '43199d0e-0712-1213-9674-5ae2af8dhgte',
'audit_type': 'ONESHOT',
'state': 'PENDING',
'audit_template_uuid': 'f8e47706-efcf-49a4-a5c4-af604eb492f2',
'audit_template_name': 'at1',
'goal_name': None,
'strategy_name': 'auto',
'created_at': datetime.datetime.now().isoformat(),
'updated_at': None,
'deleted_at': None,
'parameters': None,
'interval': 3600,
'scope': '',
'auto_trigger': True,
'next_run_time': None,
'name': 'my_audit3',
'hostname': '',
}
SHORT_LIST_FIELDS = ['uuid', 'name', 'audit_type',
'state', 'goal_name', 'strategy_name',
'auto_trigger']
SHORT_LIST_FIELD_LABELS = ['UUID', 'Name', 'Audit Type', 'State', 'Goal',
'Strategy', 'Auto Trigger']
FIELDS = ['uuid', 'name', 'created_at', 'updated_at', 'deleted_at',
'state', 'audit_type', 'parameters', 'interval', 'goal_name',
'strategy_name', 'scope', 'auto_trigger', 'next_run_time',
'hostname']
FIELD_LABELS = ['UUID', 'Name', 'Created At', 'Updated At', 'Deleted At',
'State', 'Audit Type', 'Parameters', 'Interval', 'Goal',
'Strategy', 'Audit Scope', 'Auto Trigger',
'Next Run Time', 'Hostname']
    def setUp(self, os_infra_optim_api_version='1.0'):
        """Replace every v1 resource manager with a Mock and capture CLI output.

        Each manager *class* is patched on the `resource` module; its
        return_value is pointed at a plain Mock instance so tests can program
        e.g. self.m_audit_mgr.list.return_value directly. Every patcher's
        .stop is registered with addCleanup right after .start so teardown
        mirrors setup.
        """
        super(AuditShellTest, self).setUp(
            os_infra_optim_api_version=os_infra_optim_api_version)
        # goal mock
        p_goal_manager = mock.patch.object(resource, 'GoalManager')
        self.m_goal_mgr_cls = p_goal_manager.start()
        self.addCleanup(p_goal_manager.stop)
        self.m_goal_mgr = mock.Mock()
        self.m_goal_mgr_cls.return_value = self.m_goal_mgr
        # strategy mock
        p_strategy_manager = mock.patch.object(resource, 'StrategyManager')
        self.m_strategy_mgr_cls = p_strategy_manager.start()
        self.addCleanup(p_strategy_manager.stop)
        self.m_strategy_mgr = mock.Mock()
        self.m_strategy_mgr_cls.return_value = self.m_strategy_mgr
        # audit and audit-template mocks (patched together)
        p_audit_manager = mock.patch.object(resource, 'AuditManager')
        p_audit_template_manager = mock.patch.object(
            resource, 'AuditTemplateManager')
        self.m_audit_mgr_cls = p_audit_manager.start()
        self.m_audit_template_mgr_cls = p_audit_template_manager.start()
        self.addCleanup(p_audit_manager.stop)
        self.addCleanup(p_audit_template_manager.stop)
        self.m_audit_mgr = mock.Mock()
        self.m_audit_template_mgr = mock.Mock()
        self.m_audit_mgr_cls.return_value = self.m_audit_mgr
        self.m_audit_template_mgr_cls.return_value = self.m_audit_template_mgr
        # stdout mock: the shell writes into an in-memory buffer
        self.stdout = six.StringIO()
        self.cmd = shell.WatcherShell(stdout=self.stdout)
def test_do_audit_list(self):
audit1 = resource.Audit(mock.Mock(), self.AUDIT_1)
audit2 = resource.Audit(mock.Mock(), self.AUDIT_2)
self.m_audit_mgr.list.return_value = [
audit1, audit2]
exit_code, results = self.run_cmd('audit list')
self.assertEqual(0, exit_code)
self.assertEqual(
[self.resource_as_dict(audit1, self.SHORT_LIST_FIELDS,
self.SHORT_LIST_FIELD_LABELS),
self.resource_as_dict(audit2, self.SHORT_LIST_FIELDS,
self.SHORT_LIST_FIELD_LABELS)],
results)
self.m_audit_mgr.list.assert_called_once_with(detail=False)
def test_do_audit_list_marker(self):
audit2 = resource.Audit(mock.Mock(), self.AUDIT_2)
self.m_audit_mgr.list.return_value = [audit2]
exit_code, results = self.run_cmd(
'audit list --marker 5869da81-4876-4687-a1ed-12cd64cf53d9')
self.assertEqual(0, exit_code)
self.assertEqual(
[self.resource_as_dict(audit2, self.SHORT_LIST_FIELDS,
self.SHORT_LIST_FIELD_LABELS)],
results)
self.m_audit_mgr.list.assert_called_once_with(
detail=False,
marker='5869da81-4876-4687-a1ed-12cd64cf53d9')
def test_do_audit_list_detail(self):
audit1 = resource.Audit(mock.Mock(), self.AUDIT_1)
audit2 = resource.Audit(mock.Mock(), self.AUDIT_2)
self.m_audit_mgr.list.return_value = [
audit1, audit2]
exit_code, results = self.run_cmd('audit list --detail')
self.assertEqual(0, exit_code)
self.assertEqual(
[self.resource_as_dict(audit1, self.FIELDS,
self.FIELD_LABELS),
self.resource_as_dict(audit2, self.FIELDS,
self.FIELD_LABELS)],
results)
self.m_audit_mgr.list.assert_called_once_with(detail=True)
def test_do_audit_show_by_uuid(self):
audit = resource.Audit(mock.Mock(), self.AUDIT_1)
self.m_audit_mgr.get.return_value = audit
exit_code, result = self.run_cmd(
'audit show 5869da81-4876-4687-a1ed-12cd64cf53d9')
self.assertEqual(0, exit_code)
self.assertEqual(
self.resource_as_dict(audit, self.FIELDS, self.FIELD_LABELS),
result)
self.m_audit_mgr.get.assert_called_once_with(
'5869da81-4876-4687-a1ed-12cd64cf53d9')
    def test_do_audit_show_by_name(self):
        """`audit show <name>` passes the name straight through to AuditManager.get."""
        audit = resource.Audit(mock.Mock(), self.AUDIT_1)
        self.m_audit_mgr.get.return_value = audit
        exit_code, result = self.run_cmd(
            'audit show my_audit')
        self.assertEqual(0, exit_code)
        # The detailed field set is rendered regardless of lookup key.
        self.assertEqual(
            self.resource_as_dict(audit, self.FIELDS, self.FIELD_LABELS),
            result)
        self.m_audit_mgr.get.assert_called_once_with(
            'my_audit')
def test_do_audit_delete(self):
self.m_audit_mgr.delete.return_value = ''
exit_code, result = self.run_cmd(
'audit delete 5869da81-4876-4687-a1ed-12cd64cf53d9',
formatting=None)
self.assertEqual(0, exit_code)
self.assertEqual('', result)
self.m_audit_mgr.delete.assert_called_once_with(
'5869da81-4876-4687-a1ed-12cd64cf53d9')
    def test_do_audit_delete_by_name(self):
        """`audit delete <name>` passes the name straight through to delete()."""
        self.m_audit_mgr.delete.return_value = ''
        exit_code, result = self.run_cmd(
            'audit delete my_audit',
            formatting=None)
        self.assertEqual(0, exit_code)
        self.assertEqual('', result)
        self.m_audit_mgr.delete.assert_called_once_with(
            'my_audit')
    def test_do_audit_delete_multiple(self):
        """`audit delete <uuid1> <uuid2>` issues one delete() call per UUID."""
        self.m_audit_mgr.delete.return_value = ''
        exit_code, result = self.run_cmd(
            'audit delete 5869da81-4876-4687-a1ed-12cd64cf53d9 '
            '5b157edd-5a7e-4aaa-b511-f7b33ec86e9f',
            formatting=None)
        self.assertEqual(0, exit_code)
        self.assertEqual('', result)
        # assert_any_call (not assert_called_once_with): two calls are expected,
        # one per UUID, and the order is not pinned here.
        self.m_audit_mgr.delete.assert_any_call(
            '5869da81-4876-4687-a1ed-12cd64cf53d9')
        self.m_audit_mgr.delete.assert_any_call(
            '5b157edd-5a7e-4aaa-b511-f7b33ec86e9f')
def test_do_audit_update(self):
audit = resource.Audit(mock.Mock(), self.AUDIT_1)
self.m_audit_mgr.update.return_value = audit
exit_code, result = self.run_cmd(
'audit update 5869da81-4876-4687-a1ed-12cd64cf53d9 '
'replace state=PENDING')
self.assertEqual(0, exit_code)
self.assertEqual(
self.resource_as_dict(audit, self.FIELDS, self.FIELD_LABELS),
result)
self.m_audit_mgr.update.assert_called_once_with(
'5869da81-4876-4687-a1ed-12cd64cf53d9',
[{'op': 'replace', 'path': '/state', 'value': 'PENDING'}])
def test_do_audit_update_by_name(self):
    """Updating by audit name builds the same JSON-patch as by UUID."""
    fake_audit = resource.Audit(mock.Mock(), self.AUDIT_1)
    self.m_audit_mgr.update.return_value = fake_audit
    exit_code, result = self.run_cmd(
        'audit update my_audit replace state=PENDING')
    self.assertEqual(0, exit_code)
    self.assertEqual(
        self.resource_as_dict(fake_audit, self.FIELDS, self.FIELD_LABELS),
        result)
    patch = [{'op': 'replace', 'path': '/state', 'value': 'PENDING'}]
    self.m_audit_mgr.update.assert_called_once_with('my_audit', patch)
def test_do_audit_create_with_audit_template_uuid(self):
    """`audit create -a <uuid>` passes the template UUID straight through."""
    fake_audit = resource.Audit(mock.Mock(), self.AUDIT_3)
    fake_template = resource.AuditTemplate(mock.Mock(), AUDIT_TEMPLATE_1)
    self.m_audit_template_mgr.get.return_value = fake_template
    self.m_audit_mgr.create.return_value = fake_audit
    exit_code, result = self.run_cmd(
        'audit create -a f8e47706-efcf-49a4-a5c4-af604eb492f2')
    self.assertEqual(0, exit_code)
    self.assertEqual(
        self.resource_as_dict(fake_audit, self.FIELDS, self.FIELD_LABELS),
        result)
    self.m_audit_mgr.create.assert_called_once_with(
        audit_template_uuid='f8e47706-efcf-49a4-a5c4-af604eb492f2',
        audit_type='ONESHOT',
        auto_trigger=False)
def test_do_audit_create_with_audit_template_name(self):
    """A template given by name is resolved to its UUID before create."""
    fake_audit = resource.Audit(mock.Mock(), self.AUDIT_3)
    fake_template = resource.AuditTemplate(mock.Mock(), AUDIT_TEMPLATE_1)
    self.m_audit_template_mgr.get.return_value = fake_template
    self.m_audit_mgr.create.return_value = fake_audit
    exit_code, result = self.run_cmd('audit create -a at1')
    self.assertEqual(0, exit_code)
    self.assertEqual(
        self.resource_as_dict(fake_audit, self.FIELDS, self.FIELD_LABELS),
        result)
    self.m_audit_mgr.create.assert_called_once_with(
        audit_template_uuid='f8e47706-efcf-49a4-a5c4-af604eb492f2',
        audit_type='ONESHOT',
        auto_trigger=False)
def test_do_audit_create_with_goal(self):
    """Creating with only a goal defaults to a ONESHOT audit."""
    fake_audit = resource.Audit(mock.Mock(), self.AUDIT_1)
    self.m_audit_mgr.create.return_value = fake_audit
    exit_code, result = self.run_cmd(
        'audit create -g fc087747-61be-4aad-8126-b701731ae836')
    self.assertEqual(0, exit_code)
    self.assertEqual(
        self.resource_as_dict(fake_audit, self.FIELDS, self.FIELD_LABELS),
        result)
    self.m_audit_mgr.create.assert_called_once_with(
        goal='fc087747-61be-4aad-8126-b701731ae836',
        audit_type='ONESHOT',
        auto_trigger=False)
def test_do_audit_create_with_goal_and_strategy(self):
    """Goal and strategy UUIDs are both forwarded to the manager."""
    fake_audit = resource.Audit(mock.Mock(), self.AUDIT_1)
    self.m_audit_mgr.create.return_value = fake_audit
    exit_code, result = self.run_cmd(
        'audit create -g fc087747-61be-4aad-8126-b701731ae836 -s '
        '2cf86250-d309-4b81-818e-1537f3dba6e5')
    self.assertEqual(0, exit_code)
    self.assertEqual(
        self.resource_as_dict(fake_audit, self.FIELDS, self.FIELD_LABELS),
        result)
    self.m_audit_mgr.create.assert_called_once_with(
        goal='fc087747-61be-4aad-8126-b701731ae836',
        strategy='2cf86250-d309-4b81-818e-1537f3dba6e5',
        audit_type='ONESHOT',
        auto_trigger=False)
def test_do_audit_create_with_type(self):
    """An explicit `-t ONESHOT` produces the same call as the default."""
    fake_audit = resource.Audit(mock.Mock(), self.AUDIT_1)
    self.m_audit_mgr.create.return_value = fake_audit
    exit_code, result = self.run_cmd(
        'audit create -g fc087747-61be-4aad-8126-b701731ae836 -t ONESHOT')
    self.assertEqual(0, exit_code)
    self.assertEqual(
        self.resource_as_dict(fake_audit, self.FIELDS, self.FIELD_LABELS),
        result)
    self.m_audit_mgr.create.assert_called_once_with(
        goal='fc087747-61be-4aad-8126-b701731ae836',
        audit_type='ONESHOT',
        auto_trigger=False)
def test_do_audit_create_with_parameter(self):
    """Repeated `-p key=value` flags collect into a parameters dict."""
    fake_audit = resource.Audit(mock.Mock(), self.AUDIT_1)
    self.m_audit_mgr.create.return_value = fake_audit
    exit_code, result = self.run_cmd(
        'audit create -g fc087747-61be-4aad-8126-b701731ae836 -p para1=10 '
        '-p para2=20')
    self.assertEqual(0, exit_code)
    self.assertEqual(
        self.resource_as_dict(fake_audit, self.FIELDS, self.FIELD_LABELS),
        result)
    self.m_audit_mgr.create.assert_called_once_with(
        goal='fc087747-61be-4aad-8126-b701731ae836',
        audit_type='ONESHOT',
        auto_trigger=False,
        parameters={'para1': 10, 'para2': 20})
def test_do_audit_create_with_type_event(self):
    """`-t EVENT` creates an event-driven audit."""
    fake_audit = resource.Audit(mock.Mock(), self.AUDIT_1)
    self.m_audit_mgr.create.return_value = fake_audit
    exit_code, result = self.run_cmd(
        'audit create -g fc087747-61be-4aad-8126-b701731ae836 -t EVENT')
    self.assertEqual(0, exit_code)
    self.assertEqual(
        self.resource_as_dict(fake_audit, self.FIELDS, self.FIELD_LABELS),
        result)
    self.m_audit_mgr.create.assert_called_once_with(
        goal='fc087747-61be-4aad-8126-b701731ae836',
        audit_type='EVENT',
        auto_trigger=False)
def test_do_audit_create_with_type_continuous(self):
    """`-t CONTINUOUS -i 3600` keeps the interval as a string."""
    fake_audit = resource.Audit(mock.Mock(), self.AUDIT_1)
    self.m_audit_mgr.create.return_value = fake_audit
    exit_code, result = self.run_cmd(
        'audit create -g fc087747-61be-4aad-8126-b701731ae836 '
        '-t CONTINUOUS -i 3600')
    self.assertEqual(0, exit_code)
    self.assertEqual(
        self.resource_as_dict(fake_audit, self.FIELDS, self.FIELD_LABELS),
        result)
    self.m_audit_mgr.create.assert_called_once_with(
        goal='fc087747-61be-4aad-8126-b701731ae836',
        audit_type='CONTINUOUS',
        auto_trigger=False,
        interval='3600')
def test_do_audit_create_with_name(self):
    """`--name` attaches a human-readable name to the new audit."""
    fake_audit = resource.Audit(mock.Mock(), self.AUDIT_1)
    self.m_audit_mgr.create.return_value = fake_audit
    exit_code, result = self.run_cmd(
        'audit create -g fc087747-61be-4aad-8126-b701731ae836 '
        '-t CONTINUOUS -i 3600 --name my_audit')
    self.assertEqual(0, exit_code)
    self.assertEqual(
        self.resource_as_dict(fake_audit, self.FIELDS, self.FIELD_LABELS),
        result)
    self.m_audit_mgr.create.assert_called_once_with(
        goal='fc087747-61be-4aad-8126-b701731ae836',
        audit_type='CONTINUOUS',
        auto_trigger=False,
        interval='3600',
        name='my_audit')
class AuditShellTestv11(AuditShellTest):
    """Re-run the audit shell tests against API microversion 1.1."""

    def setUp(self):
        """Extend the shared fixtures with the 1.1-only time fields."""
        super(AuditShellTestv11, self).setUp(os_infra_optim_api_version='1.1')
        time_fields = dict(start_time=None, end_time=None)
        for fixture in (self.AUDIT_1, self.AUDIT_2, self.AUDIT_3):
            fixture.update(time_fields)
        self.FIELDS.extend(['start_time', 'end_time'])
        self.FIELD_LABELS.extend(['Start Time', 'End Time'])
class AuditShellTestv12(AuditShellTest):
    """Audit shell tests against API microversion 1.2.

    1.2 adds the ``force`` field on top of 1.1's start/end times; every
    create test is repeated here to assert ``force`` is always passed.
    """

    def setUp(self):
        super(AuditShellTestv12, self).setUp(os_infra_optim_api_version='1.2')
        # 1.2 keeps the 1.1 time fields and adds the boolean `force` field.
        v11 = dict(start_time=None, end_time=None)
        v12 = dict(force=False)
        for audit in (self.AUDIT_1, self.AUDIT_2, self.AUDIT_3):
            audit.update(v11)
            audit.update(v12)
        self.FIELDS.extend(['start_time', 'end_time', 'force'])
        self.FIELD_LABELS.extend(['Start Time', 'End Time', 'Force'])

    def test_do_audit_create_with_force(self):
        """`--force` is forwarded as force=True."""
        audit = resource.Audit(mock.Mock(), self.AUDIT_3)
        audit_template = resource.AuditTemplate(mock.Mock(), AUDIT_TEMPLATE_1)
        self.m_audit_template_mgr.get.return_value = audit_template
        self.m_audit_mgr.create.return_value = audit
        exit_code, result = self.run_cmd(
            'audit create -a f8e47706-efcf-49a4-a5c4-af604eb492f2 --force')
        self.assertEqual(0, exit_code)
        self.assertEqual(
            self.resource_as_dict(audit, self.FIELDS, self.FIELD_LABELS),
            result)
        self.m_audit_mgr.create.assert_called_once_with(
            audit_template_uuid='f8e47706-efcf-49a4-a5c4-af604eb492f2',
            audit_type='ONESHOT',
            auto_trigger=False,
            force=True
        )

    def test_do_audit_create_with_audit_template_uuid(self):
        """Without `--force`, force=False is still sent under 1.2."""
        audit = resource.Audit(mock.Mock(), self.AUDIT_3)
        audit_template = resource.AuditTemplate(mock.Mock(), AUDIT_TEMPLATE_1)
        self.m_audit_template_mgr.get.return_value = audit_template
        self.m_audit_mgr.create.return_value = audit
        exit_code, result = self.run_cmd(
            'audit create -a f8e47706-efcf-49a4-a5c4-af604eb492f2')
        self.assertEqual(0, exit_code)
        self.assertEqual(
            self.resource_as_dict(audit, self.FIELDS, self.FIELD_LABELS),
            result)
        self.m_audit_mgr.create.assert_called_once_with(
            audit_template_uuid='f8e47706-efcf-49a4-a5c4-af604eb492f2',
            audit_type='ONESHOT',
            auto_trigger=False,
            force=False
        )

    def test_do_audit_create_with_audit_template_name(self):
        """Template-by-name resolution plus the 1.2 force default."""
        audit = resource.Audit(mock.Mock(), self.AUDIT_3)
        audit_template = resource.AuditTemplate(mock.Mock(), AUDIT_TEMPLATE_1)
        self.m_audit_template_mgr.get.return_value = audit_template
        self.m_audit_mgr.create.return_value = audit
        exit_code, result = self.run_cmd('audit create -a at1')
        self.assertEqual(0, exit_code)
        self.assertEqual(
            self.resource_as_dict(audit, self.FIELDS, self.FIELD_LABELS),
            result)
        self.m_audit_mgr.create.assert_called_once_with(
            audit_template_uuid='f8e47706-efcf-49a4-a5c4-af604eb492f2',
            auto_trigger=False,
            audit_type='ONESHOT',
            force=False
        )

    def test_do_audit_create_with_goal(self):
        """Goal-only create with the 1.2 force default."""
        audit = resource.Audit(mock.Mock(), self.AUDIT_1)
        self.m_audit_mgr.create.return_value = audit
        exit_code, result = self.run_cmd(
            'audit create -g fc087747-61be-4aad-8126-b701731ae836')
        self.assertEqual(0, exit_code)
        self.assertEqual(
            self.resource_as_dict(audit, self.FIELDS, self.FIELD_LABELS),
            result)
        self.m_audit_mgr.create.assert_called_once_with(
            goal='fc087747-61be-4aad-8126-b701731ae836',
            auto_trigger=False,
            audit_type='ONESHOT',
            force=False
        )

    def test_do_audit_create_with_goal_and_strategy(self):
        """Goal + strategy create with the 1.2 force default."""
        audit = resource.Audit(mock.Mock(), self.AUDIT_1)
        self.m_audit_mgr.create.return_value = audit
        exit_code, result = self.run_cmd(
            'audit create -g fc087747-61be-4aad-8126-b701731ae836 -s '
            '2cf86250-d309-4b81-818e-1537f3dba6e5')
        self.assertEqual(0, exit_code)
        self.assertEqual(
            self.resource_as_dict(audit, self.FIELDS, self.FIELD_LABELS),
            result)
        self.m_audit_mgr.create.assert_called_once_with(
            goal='fc087747-61be-4aad-8126-b701731ae836',
            strategy='2cf86250-d309-4b81-818e-1537f3dba6e5',
            auto_trigger=False,
            audit_type='ONESHOT',
            force=False
        )

    def test_do_audit_create_with_type(self):
        """Explicit ONESHOT type with the 1.2 force default."""
        audit = resource.Audit(mock.Mock(), self.AUDIT_1)
        self.m_audit_mgr.create.return_value = audit
        exit_code, result = self.run_cmd(
            'audit create -g fc087747-61be-4aad-8126-b701731ae836 -t ONESHOT')
        self.assertEqual(0, exit_code)
        self.assertEqual(
            self.resource_as_dict(audit, self.FIELDS, self.FIELD_LABELS),
            result)
        self.m_audit_mgr.create.assert_called_once_with(
            goal='fc087747-61be-4aad-8126-b701731ae836',
            auto_trigger=False,
            audit_type='ONESHOT',
            force=False
        )

    def test_do_audit_create_with_parameter(self):
        """Strategy parameters plus the 1.2 force default."""
        audit = resource.Audit(mock.Mock(), self.AUDIT_1)
        self.m_audit_mgr.create.return_value = audit
        exit_code, result = self.run_cmd(
            'audit create -g fc087747-61be-4aad-8126-b701731ae836 -p para1=10 '
            '-p para2=20')
        self.assertEqual(0, exit_code)
        self.assertEqual(
            self.resource_as_dict(audit, self.FIELDS, self.FIELD_LABELS),
            result)
        self.m_audit_mgr.create.assert_called_once_with(
            goal='fc087747-61be-4aad-8126-b701731ae836',
            audit_type='ONESHOT',
            auto_trigger=False,
            parameters={'para1': 10, 'para2': 20},
            force=False
        )

    def test_do_audit_create_with_type_continuous(self):
        """CONTINUOUS type with interval and the 1.2 force default."""
        audit = resource.Audit(mock.Mock(), self.AUDIT_1)
        self.m_audit_mgr.create.return_value = audit
        exit_code, result = self.run_cmd(
            'audit create -g fc087747-61be-4aad-8126-b701731ae836 '
            '-t CONTINUOUS -i 3600')
        self.assertEqual(0, exit_code)
        self.assertEqual(
            self.resource_as_dict(audit, self.FIELDS, self.FIELD_LABELS),
            result)
        self.m_audit_mgr.create.assert_called_once_with(
            goal='fc087747-61be-4aad-8126-b701731ae836',
            audit_type='CONTINUOUS',
            auto_trigger=False,
            interval='3600',
            force=False
        )

    def test_do_audit_create_with_type_event(self):
        """EVENT type with the 1.2 force default."""
        audit = resource.Audit(mock.Mock(), self.AUDIT_1)
        self.m_audit_mgr.create.return_value = audit
        exit_code, result = self.run_cmd(
            'audit create -g fc087747-61be-4aad-8126-b701731ae836 -t EVENT')
        self.assertEqual(0, exit_code)
        self.assertEqual(
            self.resource_as_dict(audit, self.FIELDS, self.FIELD_LABELS),
            result)
        self.m_audit_mgr.create.assert_called_once_with(
            goal='fc087747-61be-4aad-8126-b701731ae836',
            auto_trigger=False,
            audit_type='EVENT',
            force=False
        )

    def test_do_audit_create_with_name(self):
        """Named audit create with the 1.2 force default."""
        audit = resource.Audit(mock.Mock(), self.AUDIT_1)
        self.m_audit_mgr.create.return_value = audit
        exit_code, result = self.run_cmd(
            'audit create -g fc087747-61be-4aad-8126-b701731ae836 '
            '-t CONTINUOUS -i 3600 --name my_audit')
        self.assertEqual(0, exit_code)
        self.assertEqual(
            self.resource_as_dict(audit, self.FIELDS, self.FIELD_LABELS),
            result)
        self.m_audit_mgr.create.assert_called_once_with(
            goal='fc087747-61be-4aad-8126-b701731ae836',
            audit_type='CONTINUOUS',
            auto_trigger=False,
            interval='3600',
            name='my_audit',
            force=False
        )
| 37.152639
| 79
| 0.631777
| 3,180
| 26,044
| 4.894025
| 0.072327
| 0.025702
| 0.046264
| 0.052625
| 0.891216
| 0.850993
| 0.817773
| 0.798561
| 0.780055
| 0.768425
| 0
| 0.08092
| 0.258831
| 26,044
| 700
| 80
| 37.205714
| 0.725328
| 0.022846
| 0
| 0.71831
| 0
| 0
| 0.187252
| 0.092014
| 0
| 0
| 0
| 0
| 0.15493
| 1
| 0.056338
| false
| 0
| 0.010563
| 0
| 0.084507
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
254160d1dd7cb4e489aa976a263004ee68c0094f
| 576
|
py
|
Python
|
src/tensor/op/functor/conversion.py
|
jedhsu/tensor
|
3b2fe21029fa7c50b034190e77d79d1a94ea5e8f
|
[
"Apache-2.0"
] | null | null | null |
src/tensor/op/functor/conversion.py
|
jedhsu/tensor
|
3b2fe21029fa7c50b034190e77d79d1a94ea5e8f
|
[
"Apache-2.0"
] | null | null | null |
src/tensor/op/functor/conversion.py
|
jedhsu/tensor
|
3b2fe21029fa7c50b034190e77d79d1a94ea5e8f
|
[
"Apache-2.0"
] | null | null | null |
"""
Type casting operators.
"""
import jax.numpy as jnp
from .base import ArrayOperator
class CastingOperator(ArrayOperator):
    """
    Type promotion and demotion.
    """

    # Result dtype obtained by combining two dtypes (NumPy promotion rules).
    Promote = jnp.promote_types

    # Pack a binary-valued axis into uint8 words, and unpack it back.
    BitIntoU8 = jnp.packbits  # [TODO] clarify
    U8IntoBit = jnp.unpackbits
"""
Type conversion / casting operators.
"""
import jax.numpy as jnp
from .base import ArrayOperator
class CastingOperator(ArrayOperator):
    """
    Type promotion and demotion.
    """

    # Result dtype obtained by combining two dtypes (NumPy promotion rules).
    Promote = jnp.promote_types

    # NOTE(review): jnp.euler_gamma is a numeric constant, not a casting
    # routine — it looks out of place in this operator table; confirm intent.
    EulerGamma = jnp.euler_gamma  # [TODO] clarify
| 13.090909
| 50
| 0.677083
| 61
| 576
| 6.344262
| 0.459016
| 0.082687
| 0.113695
| 0.129199
| 0.728682
| 0.728682
| 0.728682
| 0.728682
| 0.728682
| 0.728682
| 0
| 0.004494
| 0.227431
| 576
| 43
| 51
| 13.395349
| 0.865169
| 0.194444
| 0
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.046512
| 0
| 1
| 0
| false
| 0
| 0.363636
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
2566aa339f399ad9acbc15ed6fc483193462238c
| 11,513
|
py
|
Python
|
liveodds/processing.py
|
nik849/LiveOdds
|
678b447529ab2367459f7937b02048d3eba7ce79
|
[
"MIT"
] | null | null | null |
liveodds/processing.py
|
nik849/LiveOdds
|
678b447529ab2367459f7937b02048d3eba7ce79
|
[
"MIT"
] | null | null | null |
liveodds/processing.py
|
nik849/LiveOdds
|
678b447529ab2367459f7937b02048d3eba7ce79
|
[
"MIT"
] | null | null | null |
from numpy import where
def process(data, tc_data, leagues):
    """
    Processing method for returning liveodds from totalcorner request

    Builds per-match prediction dicts from the live totalcorner feed.

    data: dict of model coefficients plus the current minute ("Min",
        "Ptph", "Ptfh", "Ptpa", "Ptfa", "PAtpH", "Patpa", "CofMinH",
        "CofMinA", "CoefMinH", "CoefMaxSgt", "CoefMinSgt", "ValueMax",
        "ValueMin") — assumed string/number values; TODO confirm schema.
    tc_data: iterable of per-match dicts from the totalcorner API.
    leagues: mapping of league/team name -> coefficient, or a non-mapping
        value (handled via the AttributeError fallback below).

    Returns (overall_preds, overall_results): in-play predictions and
    full-time calcs, one dict per fixture that matched a branch below.
    """
    overall_results = []
    overall_preds = []
    results_preds = {}
    league_dict = {}
    # NOTE(review): `results` and `results_preds` are never populated in
    # this function, so the cleanup loops at the bottom are no-ops.
    results = {}
    teams = []
    # Seed every home/away team with a zero coefficient.
    for match in tc_data:
        teams.append(match["h"])
        teams.append(match["a"])
        league_dict.update({match.get("h"):0})
        league_dict.update({match.get("a"):0})
    try:
        # NOTE(review): leagueset/teamset are computed but never used; this
        # try only exists to detect a non-mapping `leagues` argument.
        leagueset = set(list(leagues.keys()))
        teamset = set(teams)
    except AttributeError:
        print('No teams specified.')
        leagues = {}
        leagues['0'] = 0
    # Overlay the caller-supplied coefficients on the zero-seeded teams.
    for league, coeff in leagues.items():
        league_dict[league] = coeff
    for match in tc_data:
        gol_linestr = match["p_goal_h"][0].strip("'").split(", ")
        try:
            # NOTE(review): gol_linestr is a list, so float() always raises
            # TypeError here and the max() fallback below is the real path.
            gol_line = float(gol_linestr)
        except TypeError:
            try:
                gol_line = max([float(x) for x in gol_linestr])
            except ValueError:
                gol_line = 0
        if match["status"] == 'half':
            match["status"] = 45
        # In-play branch: fixture minute equals the model's current minute.
        if int(data["Min"]) == int(match["status"]):
            calcs = {}
            unwanted_keys = ['h_id', 'a_id', 'hc', 'ac', 'hrc', 'arc',
                'hyc', 'ayc', 'hf_hc', 'hf_ac', 'hf_hg', 'hf_ag', 'ish',
                'hp', 'ap', 'asian_corner']
            calcs.update({'Minute': match["status"]})
            #results_preds.update(match)
            calcs["Nation"] = f'{match["h"]} vs {match["a"]}'
            # League coefficient: home team's entry, else away team's.
            calcs["CoNz"] = float(league_dict.get(match["h"]))
            if calcs["CoNz"] == 0:
                calcs["CoNz"] = float(league_dict.get(match["a"]))
            match["attacks_h"] = []
            match["shot_on_h"] = []
            # Dangerous-attack differential per minute.
            calcs["Datk"] = round((float(match["dang_attacks"][0]) - float(match["dang_attacks"][1]))\
                / float(data["Min"]), 2)
            calcs["GttpH"] = float(match["shot_on"][0]) * float(data["Ptph"])
            calcs["GttfH"] = float(match["shot_off"][0]) * float(data["Ptfh"])
            calcs["GttpA"] = float(match["shot_on"][1]) * float(data["Ptpa"])
            calcs["GttfA"] = float(match["shot_off"][1]) * float(data["Ptfa"])
            calcs["GtapH"] = float(match["dang_attacks"][0]) * float(data["PAtpH"])
            # NOTE(review): uses dang_attacks[0] (home) for the away
            # estimate as well — confirm index, [1] may be intended.
            calcs["GtapA"] = float(match["dang_attacks"][0]) * float(data["Patpa"])
            calcs["GtHm"] = int((calcs["GttpH"] + calcs["GttfH"] + calcs["GtapH"])\
                + calcs["Datk"] / 3)
            calcs["gtam"] = int((calcs["GttpH"] + calcs["GttfH"] + calcs["GtapH"])\
                - calcs["Datk"] / 3)
            calcs["SgtM"] = where((where(calcs["GtHm"] < float(data["CofMinH"]), 0,
                calcs["GtHm"]) + where(calcs["gtam"] <
                float(data["CofMinA"]), 0, calcs["gtam"])
                + calcs["CoNz"] < 0), 0, (where(calcs["GtHm"] < calcs["gtam"], 0, calcs["GtHm"])
                + where(calcs["gtam"] < float(data["CofMinA"]), 0, calcs["gtam"]) + calcs["CoNz"]))
            calcs["Sgmx"] = int(match["hg"]) + int(match["ag"])
            calcs["DeltaM"] = round(((
                float(match["hg"]) - float(calcs["GtHm"])) * (float(match["ag"]) -
                calcs["gtam"]) * (float(calcs["Sgmx"]) - float(calcs["SgtM"])
                )) / 3, 2)
            # NOTE(review): GtFh is assigned twice in a row; this first
            # assignment is dead code and is immediately overwritten below.
            calcs["GtFh"] = where(where(calcs["DeltaM"] > 1, ((int(match["hg"]) *
                calcs["GtHm"]) / 2) / data["Min"] * data["CoefMinH"] -
                (calcs["DeltaM"] / 3), ((int(match["hg"]) * calcs["GtHm"]) / 2)
                / int(data["Min"]) * where(int(data["CoefMinH"]) < int(match["hg"]), int(match["hg"]),
                where(calcs["DeltaM"] > 1, ((int(match["hg"]) * calcs["GtHm"]) / 2)
                / data["Min"] * data["CoefMinH"] - (calcs["DeltaM"] / 3),
                ((int(match["hg"]) * calcs["GtHm"]) / 2)) / data["Min"] *
                data["CoefMinH"])))
            calcs["GtFh"] = where(where(calcs["DeltaM"] > 1, ((int(match["hg"]) * \
                calcs["GtHm"]) / 2) / data["Min"] * data["CoefMinH"] -
                (calcs["DeltaM"] / 3), ((int(match["hg"]) * calcs["GtHm"]) / 2) /
                data["Min"] * data["CoefMinH"]) < int(match["hg"]), int(match["hg"]),
                where(calcs["DeltaM"] > 1, ((int(match["hg"]) * calcs["GtHm"]) / 2)
                / data["Min"] * data["CoefMinH"] - (calcs["DeltaM"] / 3),
                ((int(match["hg"]) * calcs["GtHm"]) / 2) / data["Min"] *
                data["CoefMinH"]))
            calcs["GtFa"] = where(where(calcs["DeltaM"] < -1, ((int(match["ag"]) *
                calcs["gtam"]) / 2) / 50 * 90 + (calcs["DeltaM"] / 3),
                ((int(match["ag"]) * calcs["gtam"]) / 2) / 50 * 90) < int(match["ag"]),
                int(match["ag"]), where(calcs["DeltaM"] < -1, ((int(match["ag"]) *
                calcs["gtam"]) / 2) / 50 * 90 + (calcs["DeltaM"] / 3),
                ((int(match["ag"]) * calcs["gtam"]) / 2) / 50 * 90))
            calcs["sgtft1"] = round(where(calcs["GtFh"] < 0, 5, calcs["GtFh"]) + \
                where(calcs["GtFa"] < 0, 7, calcs["GtFa"]) + \
                where(calcs["DeltaM"] > data["CoefMaxSgt"], data["ValueMax"],
                where(calcs["DeltaM"] < data["CoefMinSgt"], data["ValueMin"],
                0)) + float(calcs["CoNz"]), 2)
            calcs["sgtft"] = round(((calcs["sgtft1"] * where(float(data["Min"]) > 75,
                calcs["Sgmx"], calcs["sgtft1"]) * where((float(data["Min"]) <
                46 and float(data["Min"]) > 35), calcs["Sgmx"], (calcs["GtHm"] +
                calcs["gtam"]))) / 3) + (calcs["CoNz"] / 3), 2)
            i_goalstr = match["i_goal"][0].strip("'").split(", ")
            try:
                # Same list-vs-float pattern as gol_line above.
                i_goal = float(i_goalstr)
            except TypeError:
                try:
                    i_goal = max([float(x) for x in i_goalstr])
                except ValueError:
                    i_goal = 0
            # NOTE(review): any nonzero difference (including negative)
            # maps to 'Over' — confirm this truthiness check is intended
            # rather than a `>` comparison.
            if calcs["sgtft1"] - i_goal:
                string = 'Over'
            else:
                string = 'Under'
            calcs["U/O"] = f'{string} {gol_line}'
            print(match)
            overall_preds.append(calcs)
        # Full-time branch: same calculation, unrounded, no i_goal lookup.
        if match["status"] == 'full':
            calcs = {}
            unwanted_keys = ['h_id', 'a_id', 'hc', 'ac', 'hrc', 'arc',
                'hyc', 'ayc', 'hf_hc', 'hf_ac', 'hf_hg', 'hf_ag', 'ish',
                'hp', 'ap', 'asian_corner']
            calcs.update({'Minute': match["status"]})
            #results_preds.update(match)
            calcs["Nation"] = f'{match["h"]} vs {match["a"]}'
            calcs["CoNz"] = float(league_dict.get(match["h"]))
            if calcs["CoNz"] == 0:
                calcs["CoNz"] = float(league_dict.get(match["a"]))
            match["attacks_h"] = []
            match["shot_on_h"] = []
            calcs["Datk"] = round((float(match["dang_attacks"][0]) - float(match["dang_attacks"][1]))\
                / float(data["Min"]), 2)
            calcs["GttpH"] = float(match["shot_on"][0]) * float(data["Ptph"])
            calcs["GttfH"] = float(match["shot_off"][0]) * float(data["Ptfh"])
            calcs["GttpA"] = float(match["shot_on"][1]) * float(data["Ptpa"])
            calcs["GttfA"] = float(match["shot_off"][1]) * float(data["Ptfa"])
            calcs["GtapH"] = float(match["dang_attacks"][0]) * float(data["PAtpH"])
            calcs["GtapA"] = float(match["dang_attacks"][0]) * float(data["Patpa"])
            calcs["GtHm"] = int((calcs["GttpH"] + calcs["GttfH"] + calcs["GtapH"])\
                + calcs["Datk"] / 3)
            calcs["gtam"] = int((calcs["GttpH"] + calcs["GttfH"] + calcs["GtapH"])\
                - calcs["Datk"] / 3)
            calcs["SgtM"] = where((where(calcs["GtHm"] < float(data["CofMinH"]), 0,
                calcs["GtHm"]) + where(calcs["gtam"] <
                float(data["CofMinA"]), 0, calcs["gtam"])
                + calcs["CoNz"] < 0), 0, (where(calcs["GtHm"] < calcs["gtam"], 0, calcs["GtHm"])
                + where(calcs["gtam"] < float(data["CofMinA"]), 0, calcs["gtam"]) + calcs["CoNz"]))
            calcs["Sgmx"] = int(match["hg"]) + int(match["ag"])
            calcs["DeltaM"] = ((
                float(match["hg"]) - float(calcs["GtHm"])) * (float(match["ag"]) -
                calcs["gtam"]) * (float(calcs["Sgmx"]) - float(calcs["SgtM"])
                )) / 3
            # NOTE(review): dead first GtFh assignment, as in the branch above.
            calcs["GtFh"] = where(where(calcs["DeltaM"] > 1, ((int(match["hg"]) *
                calcs["GtHm"]) / 2) / data["Min"] * data["CoefMinH"] -
                (calcs["DeltaM"] / 3), ((int(match["hg"]) * calcs["GtHm"]) / 2)
                / int(data["Min"]) * where(int(data["CoefMinH"]) < int(match["hg"]), int(match["hg"]),
                where(calcs["DeltaM"] > 1, ((int(match["hg"]) * calcs["GtHm"]) / 2)
                / data["Min"] * data["CoefMinH"] - (calcs["DeltaM"] / 3),
                ((int(match["hg"]) * calcs["GtHm"]) / 2)) / data["Min"] *
                data["CoefMinH"])))
            calcs["GtFh"] = where(where(calcs["DeltaM"] > 1, ((int(match["hg"]) * \
                calcs["GtHm"]) / 2) / data["Min"] * data["CoefMinH"] -
                (calcs["DeltaM"] / 3), ((int(match["hg"]) * calcs["GtHm"]) / 2) /
                data["Min"] * data["CoefMinH"]) < int(match["hg"]), int(match["hg"]),
                where(calcs["DeltaM"] > 1, ((int(match["hg"]) * calcs["GtHm"]) / 2)
                / data["Min"] * data["CoefMinH"] - (calcs["DeltaM"] / 3),
                ((int(match["hg"]) * calcs["GtHm"]) / 2) / data["Min"] *
                data["CoefMinH"]))
            calcs["GtFa"] = where(where(calcs["DeltaM"] < -1, ((int(match["ag"]) *
                calcs["gtam"]) / 2) / 50 * 90 + (calcs["DeltaM"] / 3),
                ((int(match["ag"]) * calcs["gtam"]) / 2) / 50 * 90) < int(match["ag"]),
                int(match["ag"]), where(calcs["DeltaM"] < -1, ((int(match["ag"]) *
                calcs["gtam"]) / 2) / 50 * 90 + (calcs["DeltaM"] / 3),
                ((int(match["ag"]) * calcs["gtam"]) / 2) / 50 * 90))
            calcs["sgtft1"] = where(calcs["GtFh"] < 0, 5, calcs["GtFh"]) + \
                where(calcs["GtFa"] < 0, 7, calcs["GtFa"]) + \
                where(calcs["DeltaM"] > data["CoefMaxSgt"], data["ValueMax"],
                where(calcs["DeltaM"] < data["CoefMinSgt"], data["ValueMin"],
                0)) + float(calcs["CoNz"])
            calcs["sgtft"] = ((calcs["sgtft1"] * where(float(data["Min"]) > 75,
                calcs["Sgmx"], calcs["sgtft1"]) * where((float(data["Min"]) <
                46 and float(data["Min"]) > 35), calcs["Sgmx"], (calcs["GtHm"] +
                calcs["gtam"]))) / 3) + (calcs["CoNz"] / 3)
            if calcs["sgtft1"] - gol_line:
                string = 'Over'
            else:
                string = 'Under'
            calcs["U/O"] = f'{string} {gol_line}'
            overall_results.append(calcs)
    # NOTE(review): unwanted_keys is only bound inside the branches above;
    # if no fixture entered either branch this would raise NameError (only
    # reachable when results/results_preds are non-empty, which currently
    # never happens — see note at the top).
    if len(results) > 0:
        for key in unwanted_keys:
            try:
                del results[key]
            except KeyError:
                pass
    if len(results_preds) > 0:
        for key in unwanted_keys:
            try:
                del results_preds[key]
            except KeyError:
                pass
    return overall_preds, overall_results
| 51.168889
| 117
| 0.448797
| 1,249
| 11,513
| 4.072058
| 0.115292
| 0.064491
| 0.051121
| 0.047188
| 0.825403
| 0.809674
| 0.803775
| 0.803775
| 0.803775
| 0.790405
| 0
| 0.02161
| 0.328759
| 11,513
| 224
| 118
| 51.397321
| 0.636517
| 0.010423
| 0
| 0.725888
| 0
| 0
| 0.155327
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.005076
| false
| 0.010152
| 0.005076
| 0
| 0.015228
| 0.010152
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c2665b3172487a86291cbc5a2e9228c5346150ca
| 7,747
|
py
|
Python
|
bindings/python/ensmallen_graph/datasets/linqs/citeseer.py
|
caufieldjh/ensmallen_graph
|
14e98b1cdbc73193a84a913d7d4f2b2b3eb2c43a
|
[
"MIT"
] | null | null | null |
bindings/python/ensmallen_graph/datasets/linqs/citeseer.py
|
caufieldjh/ensmallen_graph
|
14e98b1cdbc73193a84a913d7d4f2b2b3eb2c43a
|
[
"MIT"
] | null | null | null |
bindings/python/ensmallen_graph/datasets/linqs/citeseer.py
|
caufieldjh/ensmallen_graph
|
14e98b1cdbc73193a84a913d7d4f2b2b3eb2c43a
|
[
"MIT"
] | null | null | null |
"""
This file offers the methods to automatically retrieve the graph CiteSeer.
The graph is automatically retrieved from the LINQS repository.
The CiteSeer dataset consists of 3312 scientific publications classified
into one of six classes. The citation network consists of 4732 links. Each
publication in the dataset is described by a 0/1-valued word vector indicating
the absence/presence of the corresponding word from the dictionary. The
dictionary consists of 3703 unique words.
Report
---------------------
At the time of rendering these methods (please see datetime below), the graph
had the following characteristics:
Datetime: 2021-02-13 10:41:49.820142
The undirected graph CiteSeer has 7030 nodes with 8 different node types:
the 5 most common are Word (nodes number 3703), DB (nodes number 701),
IR (nodes number 668), Agents (nodes number 596) and ML (nodes number 590)
and 109841 unweighted edges with 2 different edge types: Paper2Word and
Paper2Paper, of which 124 are self-loops. The graph is sparse as it has
a density of 0.00444 and is connected, as it has a single component. The
graph median node degree is 28, the mean node degree is 31.23, and the
node degree mode is 5. The top 5 most central nodes are word_2568 (degree
704), word_65 (degree 670), word_729 (degree 651), word_601 (degree 627)
and word_2615 (degree 607).
References
---------------------
Please cite the following if you use the data:
@incollection{getoor2005link,
title={Link-based classification},
author={Getoor, Lise},
booktitle={Advanced methods for knowledge discovery from complex data},
pages={189--207},
year={2005},
publisher={Springer}
}
@article{sen2008collective,
title={Collective classification in network data},
author={Sen, Prithviraj and Namata, Galileo and Bilgic, Mustafa and Getoor, Lise and Galligher, Brian and Eliassi-Rad, Tina},
journal={AI magazine},
volume={29},
number={3},
pages={93--93},
year={2008}
}
Usage example
----------------------
The usage of this graph is relatively straightforward:
.. code:: python
# First import the function to retrieve the graph from the datasets
from ensmallen_graph.datasets.linqs import CiteSeer
# Then load the graph
graph = CiteSeer()
# Finally, you can do anything with it, for instance, compute its report:
print(graph)
# If you need to run a link prediction task with validation,
# you can split the graph using a connected holdout as follows:
train_graph, validation_graph = graph.connected_holdout(
# You can use an 80/20 split the holdout, for example.
train_size=0.8,
# The random state is used to reproduce the holdout.
random_state=42,
# Wether to show a loading bar.
verbose=True
)
# Remember that, if you need, you can enable the memory-time trade-offs:
train_graph.enable(
vector_sources=True,
vector_destinations=True,
vector_outbounds=True
)
# Consider using the methods made available in the Embiggen package
# to run graph embedding or link prediction tasks.
"""
from typing import Dict
from .parse_linqs import parse_linqs_incidence_matrix
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen_graph import EnsmallenGraph # pylint: disable=import-error
def CiteSeer(
    directed: bool = False,
    verbose: int = 2,
    cache_path: str = "graphs/linqs",
    **additional_graph_kwargs: Dict
) -> EnsmallenGraph:
    """Return new instance of the CiteSeer graph.

    The graph is automatically downloaded from the LINQS repository on
    first use and cached locally. CiteSeer is a citation network of 3312
    scientific publications in six classes (4732 citation links); each
    paper carries a 0/1 word vector over a 3703-word dictionary.

    Parameters
    -------------------
    directed: bool = False,
        Wether to load the graph as directed or undirected.
        By default false.
    verbose: int = 2,
        Wether to show loading bars during the retrieval and building
        of the graph.
    cache_path: str = "graphs",
        Where to store the downloaded graphs.
    additional_graph_kwargs: Dict,
        Additional graph kwargs.

    Returns
    -----------------------
    Instace of CiteSeer graph.

    Usage example
    ----------------------
    .. code:: python

        from ensmallen_graph.datasets.linqs import CiteSeer

        graph = CiteSeer()
        print(graph)
    """
    # The incidence-matrix callback converts the raw LINQS files into the
    # node/edge lists the generic retriever expects.
    retrieval_callbacks = [parse_linqs_incidence_matrix]
    callback_kwargs = [{
        "cites_path": "linqs/citeseer/citeseer/citeseer.cites",
        "content_path": "linqs/citeseer/citeseer/citeseer.content",
        "node_list_path": "linqs/citeseer/nodes.tsv",
        "edge_list_path": "linqs/citeseer/edges.tsv"
    }]
    retriever = AutomaticallyRetrievedGraph(
        graph_name="CiteSeer",
        dataset="linqs",
        directed=directed,
        verbose=verbose,
        cache_path=cache_path,
        additional_graph_kwargs=additional_graph_kwargs,
        callbacks=retrieval_callbacks,
        callbacks_arguments=callback_kwargs
    )
    # The retriever instance is callable; invoking it performs the actual
    # download/build and returns the EnsmallenGraph.
    return retriever()
| 33.97807
| 128
| 0.70324
| 1,063
| 7,747
| 5.068674
| 0.267168
| 0.025241
| 0.019488
| 0.005939
| 0.821826
| 0.809577
| 0.809577
| 0.809577
| 0.809577
| 0.809577
| 0
| 0.049322
| 0.20963
| 7,747
| 227
| 129
| 34.127753
| 0.830639
| 0.898541
| 0
| 0
| 0
| 0
| 0.212249
| 0.133052
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034483
| false
| 0
| 0.137931
| 0
| 0.206897
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c2e3994a67949e9860b543ab0474157d7323beca
| 34
|
py
|
Python
|
bar/bar.py
|
PawelBogdan/AdditionalLibrary
|
ea7dfd69b998c57af7f7bc7490dbe26b649b4679
|
[
"MIT"
] | null | null | null |
bar/bar.py
|
PawelBogdan/AdditionalLibrary
|
ea7dfd69b998c57af7f7bc7490dbe26b649b4679
|
[
"MIT"
] | null | null | null |
bar/bar.py
|
PawelBogdan/AdditionalLibrary
|
ea7dfd69b998c57af7f7bc7490dbe26b649b4679
|
[
"MIT"
] | null | null | null |
def f():
return "FOO BAR!!!"
| 8.5
| 23
| 0.470588
| 5
| 34
| 3.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.294118
| 34
| 3
| 24
| 11.333333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0.30303
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
6c058c2e51b5e7ae0172dc1a97021a132586c837
| 289
|
py
|
Python
|
src/vigorish/cli/components/__init__.py
|
a-luna/vigorish
|
6cede5ced76c7d2c9ad0aacdbd2b18c2f1ee4ee6
|
[
"MIT"
] | 2
|
2021-07-15T13:53:33.000Z
|
2021-07-25T17:03:29.000Z
|
src/vigorish/cli/components/__init__.py
|
a-luna/vigorish
|
6cede5ced76c7d2c9ad0aacdbd2b18c2f1ee4ee6
|
[
"MIT"
] | 650
|
2019-05-18T07:00:12.000Z
|
2022-01-21T19:38:55.000Z
|
src/vigorish/cli/components/__init__.py
|
a-luna/vigorish
|
6cede5ced76c7d2c9ad0aacdbd2b18c2f1ee4ee6
|
[
"MIT"
] | 2
|
2020-03-28T21:01:31.000Z
|
2022-01-06T05:16:11.000Z
|
# flake8: noqa
from vigorish.cli.components.data_set_check import DataSetCheck
from vigorish.cli.components.date_input import DateInput
from vigorish.cli.components.job_name_input import JobNameInput
from vigorish.cli.components.prompts import *
from vigorish.cli.components.util import *
| 41.285714
| 63
| 0.854671
| 40
| 289
| 6.05
| 0.475
| 0.247934
| 0.309917
| 0.516529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003759
| 0.079585
| 289
| 6
| 64
| 48.166667
| 0.906015
| 0.041522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6c32570603acb4e88589377c1bd8936afe15b718
| 38
|
py
|
Python
|
__init__.py
|
Mateil04/bcg_analysis
|
0eb1582439e00ff77634389e26c40ef573e09930
|
[
"MIT"
] | 1
|
2020-11-04T09:56:46.000Z
|
2020-11-04T09:56:46.000Z
|
__init__.py
|
Mateil04/bcg_analysis
|
0eb1582439e00ff77634389e26c40ef573e09930
|
[
"MIT"
] | null | null | null |
__init__.py
|
Mateil04/bcg_analysis
|
0eb1582439e00ff77634389e26c40ef573e09930
|
[
"MIT"
] | null | null | null |
from .bcg_analysis import Generate_BCG
| 38
| 38
| 0.894737
| 6
| 38
| 5.333333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078947
| 38
| 1
| 38
| 38
| 0.914286
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6c3d551ae0543efbea2ecb4b38db0c7bdca4dbd2
| 268
|
py
|
Python
|
checkov/common/checks_infra/solvers/__init__.py
|
niradler/checkov
|
2628c6f28a5604efe3877d6eacc3044d2b66b7b1
|
[
"Apache-2.0"
] | 4,013
|
2019-12-09T13:16:54.000Z
|
2022-03-31T14:31:01.000Z
|
checkov/common/checks_infra/solvers/__init__.py
|
niradler/checkov
|
2628c6f28a5604efe3877d6eacc3044d2b66b7b1
|
[
"Apache-2.0"
] | 1,258
|
2019-12-17T09:55:51.000Z
|
2022-03-31T19:17:17.000Z
|
checkov/common/checks_infra/solvers/__init__.py
|
niradler/checkov
|
2628c6f28a5604efe3877d6eacc3044d2b66b7b1
|
[
"Apache-2.0"
] | 638
|
2019-12-19T08:57:38.000Z
|
2022-03-30T21:38:37.000Z
|
from checkov.common.checks_infra.solvers.attribute_solvers import *
from checkov.common.checks_infra.solvers.complex_solvers import *
from checkov.common.checks_infra.solvers.connections_solvers import *
from checkov.common.checks_infra.solvers.filter_solvers import *
| 67
| 69
| 0.869403
| 36
| 268
| 6.25
| 0.305556
| 0.195556
| 0.302222
| 0.408889
| 0.795556
| 0.795556
| 0.64
| 0.64
| 0
| 0
| 0
| 0
| 0.05597
| 268
| 4
| 70
| 67
| 0.889328
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6c77ce4dcb86308954ea80a4967b669b0d08eae0
| 217
|
py
|
Python
|
backend/streams_explorer/api/dependencies/dataflow_graph.py
|
twiechert/streams-explorer
|
7b5840f1efa0f7e352aef05ef8648188ee188f7a
|
[
"MIT"
] | 24
|
2021-02-17T16:29:07.000Z
|
2022-01-03T05:13:31.000Z
|
backend/streams_explorer/api/dependencies/dataflow_graph.py
|
twiechert/streams-explorer
|
7b5840f1efa0f7e352aef05ef8648188ee188f7a
|
[
"MIT"
] | 116
|
2021-02-03T08:44:27.000Z
|
2022-03-22T09:50:03.000Z
|
backend/streams_explorer/api/dependencies/dataflow_graph.py
|
twiechert/streams-explorer
|
7b5840f1efa0f7e352aef05ef8648188ee188f7a
|
[
"MIT"
] | 2
|
2021-04-06T15:34:37.000Z
|
2021-05-14T16:52:44.000Z
|
from starlette.requests import Request
from streams_explorer.core.services.dataflow_graph import DataFlowGraph
def get_dataflow_graph(request: Request) -> DataFlowGraph:
return request.app.state.dataflow_graph
| 27.125
| 71
| 0.83871
| 27
| 217
| 6.555556
| 0.62963
| 0.220339
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101382
| 217
| 7
| 72
| 31
| 0.907692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
6c7e35dda1131aaa45a4045dd283e7ac40f11d9f
| 84
|
py
|
Python
|
day1/test_d1.py
|
agnul/AdventOfCode2019
|
422a8cc11889473e9a046f25131163fe0b67d6b5
|
[
"Unlicense"
] | null | null | null |
day1/test_d1.py
|
agnul/AdventOfCode2019
|
422a8cc11889473e9a046f25131163fe0b67d6b5
|
[
"Unlicense"
] | null | null | null |
day1/test_d1.py
|
agnul/AdventOfCode2019
|
422a8cc11889473e9a046f25131163fe0b67d6b5
|
[
"Unlicense"
] | null | null | null |
from day1 import fuel_for_mass
def test_fuel():
assert fuel_for_mass(2) == -2
| 14
| 33
| 0.714286
| 15
| 84
| 3.666667
| 0.666667
| 0.254545
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.044118
| 0.190476
| 84
| 5
| 34
| 16.8
| 0.764706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
dd172ed876c103df3eb96c53ea9bf82cc0d4d109
| 54,411
|
py
|
Python
|
napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/ospfv2/global_/timers/max_metric/state/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 64
|
2016-10-20T15:47:18.000Z
|
2021-11-11T11:57:32.000Z
|
napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/ospfv2/global_/timers/max_metric/state/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 126
|
2016-10-05T10:36:14.000Z
|
2019-05-15T08:43:23.000Z
|
napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/ospfv2/global_/timers/max_metric/state/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 63
|
2016-11-07T15:23:08.000Z
|
2021-09-22T14:41:16.000Z
|
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
class state(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/ospfv2/global/timers/max-metric/state. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Operational state parameters relating to setting the
OSPFv2 maximum metric for a set of advertised entities.
"""
__slots__ = (
"_path_helper", "_extmethods", "__set_", "__timeout", "__include", "__trigger"
)
_yang_name = "state"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__set_ = YANGDynClass(
base=YANGBool,
is_leaf=True,
yang_name="set",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
self.__timeout = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..18446744073709551615"]},
int_size=64,
),
is_leaf=True,
yang_name="timeout",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint64",
is_config=False,
)
self.__include = YANGDynClass(
base=TypedListType(
allowed_type=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"MAX_METRIC_INCLUDE_STUB": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospf-types:MAX_METRIC_INCLUDE_STUB": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospft:MAX_METRIC_INCLUDE_STUB": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"MAX_METRIC_INCLUDE_TYPE2_EXTERNAL": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospf-types:MAX_METRIC_INCLUDE_TYPE2_EXTERNAL": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospft:MAX_METRIC_INCLUDE_TYPE2_EXTERNAL": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
},
)
),
is_leaf=False,
yang_name="include",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="identityref",
is_config=False,
)
self.__trigger = YANGDynClass(
base=TypedListType(
allowed_type=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"MAX_METRIC_ON_SYSTEM_BOOT": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospf-types:MAX_METRIC_ON_SYSTEM_BOOT": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospft:MAX_METRIC_ON_SYSTEM_BOOT": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
},
)
),
is_leaf=False,
yang_name="trigger",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="identityref",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"ospfv2",
"global",
"timers",
"max-metric",
"state",
]
def _get_set_(self):
"""
Getter method for set_, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/global/timers/max_metric/state/set (boolean)
YANG Description: When this leaf is set to true, all non-stub interfaces of
the local system are advertised with the maximum metric,
such that the router does not act as a transit system,
(similarly to the IS-IS overload functionality).
"""
return self.__set_
def _set_set_(self, v, load=False):
"""
Setter method for set_, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/global/timers/max_metric/state/set (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_set_ is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_set_() directly.
YANG Description: When this leaf is set to true, all non-stub interfaces of
the local system are advertised with the maximum metric,
such that the router does not act as a transit system,
(similarly to the IS-IS overload functionality).
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGBool,
is_leaf=True,
yang_name="set",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """set_ must be of a type compatible with boolean""",
"defined-type": "boolean",
"generated-type": """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="set", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
}
)
self.__set_ = t
if hasattr(self, "_set"):
self._set()
def _unset_set_(self):
self.__set_ = YANGDynClass(
base=YANGBool,
is_leaf=True,
yang_name="set",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
def _get_timeout(self):
"""
Getter method for timeout, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/global/timers/max_metric/state/timeout (uint64)
YANG Description: The delay, in seconds, after which the advertisement of
entities with the maximum metric should be cleared, and
the system reverts to the default, or configured, metrics.
"""
return self.__timeout
def _set_timeout(self, v, load=False):
"""
Setter method for timeout, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/global/timers/max_metric/state/timeout (uint64)
If this variable is read-only (config: false) in the
source YANG file, then _set_timeout is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_timeout() directly.
YANG Description: The delay, in seconds, after which the advertisement of
entities with the maximum metric should be cleared, and
the system reverts to the default, or configured, metrics.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..18446744073709551615"]},
int_size=64,
),
is_leaf=True,
yang_name="timeout",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint64",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """timeout must be of a type compatible with uint64""",
"defined-type": "uint64",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="timeout", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint64', is_config=False)""",
}
)
self.__timeout = t
if hasattr(self, "_set"):
self._set()
def _unset_timeout(self):
self.__timeout = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..18446744073709551615"]},
int_size=64,
),
is_leaf=True,
yang_name="timeout",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint64",
is_config=False,
)
def _get_include(self):
"""
Getter method for include, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/global/timers/max_metric/state/include (identityref)
YANG Description: By default, the maximum metric is advertised for all
non-stub interfaces of a device. When identities are
specified within this leaf-list, additional entities
are also advertised with the maximum metric according
to the values within the list.
"""
return self.__include
def _set_include(self, v, load=False):
"""
Setter method for include, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/global/timers/max_metric/state/include (identityref)
If this variable is read-only (config: false) in the
source YANG file, then _set_include is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_include() directly.
YANG Description: By default, the maximum metric is advertised for all
non-stub interfaces of a device. When identities are
specified within this leaf-list, additional entities
are also advertised with the maximum metric according
to the values within the list.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=TypedListType(
allowed_type=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"MAX_METRIC_INCLUDE_STUB": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospf-types:MAX_METRIC_INCLUDE_STUB": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospft:MAX_METRIC_INCLUDE_STUB": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"MAX_METRIC_INCLUDE_TYPE2_EXTERNAL": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospf-types:MAX_METRIC_INCLUDE_TYPE2_EXTERNAL": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospft:MAX_METRIC_INCLUDE_TYPE2_EXTERNAL": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
},
)
),
is_leaf=False,
yang_name="include",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="identityref",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """include must be of a type compatible with identityref""",
"defined-type": "openconfig-network-instance:identityref",
"generated-type": """YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'MAX_METRIC_INCLUDE_STUB': {'@module': 'openconfig-ospf-types', '@namespace': 'http://openconfig.net/yang/ospf-types'}, 'oc-ospf-types:MAX_METRIC_INCLUDE_STUB': {'@module': 'openconfig-ospf-types', '@namespace': 'http://openconfig.net/yang/ospf-types'}, 'oc-ospft:MAX_METRIC_INCLUDE_STUB': {'@module': 'openconfig-ospf-types', '@namespace': 'http://openconfig.net/yang/ospf-types'}, 'MAX_METRIC_INCLUDE_TYPE2_EXTERNAL': {'@module': 'openconfig-ospf-types', '@namespace': 'http://openconfig.net/yang/ospf-types'}, 'oc-ospf-types:MAX_METRIC_INCLUDE_TYPE2_EXTERNAL': {'@module': 'openconfig-ospf-types', '@namespace': 'http://openconfig.net/yang/ospf-types'}, 'oc-ospft:MAX_METRIC_INCLUDE_TYPE2_EXTERNAL': {'@module': 'openconfig-ospf-types', '@namespace': 'http://openconfig.net/yang/ospf-types'}},)), is_leaf=False, yang_name="include", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='identityref', is_config=False)""",
}
)
self.__include = t
if hasattr(self, "_set"):
self._set()
def _unset_include(self):
self.__include = YANGDynClass(
base=TypedListType(
allowed_type=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"MAX_METRIC_INCLUDE_STUB": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospf-types:MAX_METRIC_INCLUDE_STUB": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospft:MAX_METRIC_INCLUDE_STUB": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"MAX_METRIC_INCLUDE_TYPE2_EXTERNAL": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospf-types:MAX_METRIC_INCLUDE_TYPE2_EXTERNAL": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospft:MAX_METRIC_INCLUDE_TYPE2_EXTERNAL": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
},
)
),
is_leaf=False,
yang_name="include",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="identityref",
is_config=False,
)
def _get_trigger(self):
"""
Getter method for trigger, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/global/timers/max_metric/state/trigger (identityref)
YANG Description: By default, the maximum metric is only advertised
when the max-metric/set leaf is specified as true.
In the case that identities are specified within this
list, they provide additional triggers (e.g., system
boot) that may cause the max-metric to be set. In this
case, the system should still honour the timeout specified
by the max-metric/timeout leaf, and clear the max-metric
advertisements after the expiration of this timer.
"""
return self.__trigger
def _set_trigger(self, v, load=False):
"""
Setter method for trigger, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/global/timers/max_metric/state/trigger (identityref)
If this variable is read-only (config: false) in the
source YANG file, then _set_trigger is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_trigger() directly.
YANG Description: By default, the maximum metric is only advertised
when the max-metric/set leaf is specified as true.
In the case that identities are specified within this
list, they provide additional triggers (e.g., system
boot) that may cause the max-metric to be set. In this
case, the system should still honour the timeout specified
by the max-metric/timeout leaf, and clear the max-metric
advertisements after the expiration of this timer.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=TypedListType(
allowed_type=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"MAX_METRIC_ON_SYSTEM_BOOT": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospf-types:MAX_METRIC_ON_SYSTEM_BOOT": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospft:MAX_METRIC_ON_SYSTEM_BOOT": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
},
)
),
is_leaf=False,
yang_name="trigger",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="identityref",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """trigger must be of a type compatible with identityref""",
"defined-type": "openconfig-network-instance:identityref",
"generated-type": """YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'MAX_METRIC_ON_SYSTEM_BOOT': {'@module': 'openconfig-ospf-types', '@namespace': 'http://openconfig.net/yang/ospf-types'}, 'oc-ospf-types:MAX_METRIC_ON_SYSTEM_BOOT': {'@module': 'openconfig-ospf-types', '@namespace': 'http://openconfig.net/yang/ospf-types'}, 'oc-ospft:MAX_METRIC_ON_SYSTEM_BOOT': {'@module': 'openconfig-ospf-types', '@namespace': 'http://openconfig.net/yang/ospf-types'}},)), is_leaf=False, yang_name="trigger", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='identityref', is_config=False)""",
}
)
self.__trigger = t
if hasattr(self, "_set"):
self._set()
def _unset_trigger(self):
self.__trigger = YANGDynClass(
base=TypedListType(
allowed_type=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"MAX_METRIC_ON_SYSTEM_BOOT": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospf-types:MAX_METRIC_ON_SYSTEM_BOOT": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospft:MAX_METRIC_ON_SYSTEM_BOOT": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
},
)
),
is_leaf=False,
yang_name="trigger",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="identityref",
is_config=False,
)
set_ = __builtin__.property(_get_set_)
timeout = __builtin__.property(_get_timeout)
include = __builtin__.property(_get_include)
trigger = __builtin__.property(_get_trigger)
_pyangbind_elements = OrderedDict(
[
("set_", set_),
("timeout", timeout),
("include", include),
("trigger", trigger),
]
)
class state(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/ospfv2/global/timers/max-metric/state. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Operational state parameters relating to setting the
OSPFv2 maximum metric for a set of advertised entities.
"""
__slots__ = (
"_path_helper", "_extmethods", "__set_", "__timeout", "__include", "__trigger"
)
_yang_name = "state"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__set_ = YANGDynClass(
base=YANGBool,
is_leaf=True,
yang_name="set",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
self.__timeout = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..18446744073709551615"]},
int_size=64,
),
is_leaf=True,
yang_name="timeout",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint64",
is_config=False,
)
self.__include = YANGDynClass(
base=TypedListType(
allowed_type=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"MAX_METRIC_INCLUDE_STUB": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospf-types:MAX_METRIC_INCLUDE_STUB": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospft:MAX_METRIC_INCLUDE_STUB": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"MAX_METRIC_INCLUDE_TYPE2_EXTERNAL": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospf-types:MAX_METRIC_INCLUDE_TYPE2_EXTERNAL": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospft:MAX_METRIC_INCLUDE_TYPE2_EXTERNAL": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
},
)
),
is_leaf=False,
yang_name="include",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="identityref",
is_config=False,
)
self.__trigger = YANGDynClass(
base=TypedListType(
allowed_type=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"MAX_METRIC_ON_SYSTEM_BOOT": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospf-types:MAX_METRIC_ON_SYSTEM_BOOT": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospft:MAX_METRIC_ON_SYSTEM_BOOT": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
},
)
),
is_leaf=False,
yang_name="trigger",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="identityref",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"ospfv2",
"global",
"timers",
"max-metric",
"state",
]
def _get_set_(self):
"""
Getter method for set_, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/global/timers/max_metric/state/set (boolean)
YANG Description: When this leaf is set to true, all non-stub interfaces of
the local system are advertised with the maximum metric,
such that the router does not act as a transit system,
(similarly to the IS-IS overload functionality).
"""
return self.__set_
def _set_set_(self, v, load=False):
"""
Setter method for set_, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/global/timers/max_metric/state/set (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_set_ is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_set_() directly.
YANG Description: When this leaf is set to true, all non-stub interfaces of
the local system are advertised with the maximum metric,
such that the router does not act as a transit system,
(similarly to the IS-IS overload functionality).
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGBool,
is_leaf=True,
yang_name="set",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """set_ must be of a type compatible with boolean""",
"defined-type": "boolean",
"generated-type": """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="set", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
}
)
self.__set_ = t
if hasattr(self, "_set"):
self._set()
def _unset_set_(self):
self.__set_ = YANGDynClass(
base=YANGBool,
is_leaf=True,
yang_name="set",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
def _get_timeout(self):
"""
Getter method for timeout, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/global/timers/max_metric/state/timeout (uint64)
YANG Description: The delay, in seconds, after which the advertisement of
entities with the maximum metric should be cleared, and
the system reverts to the default, or configured, metrics.
"""
return self.__timeout
def _set_timeout(self, v, load=False):
"""
Setter method for timeout, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/global/timers/max_metric/state/timeout (uint64)
If this variable is read-only (config: false) in the
source YANG file, then _set_timeout is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_timeout() directly.
YANG Description: The delay, in seconds, after which the advertisement of
entities with the maximum metric should be cleared, and
the system reverts to the default, or configured, metrics.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..18446744073709551615"]},
int_size=64,
),
is_leaf=True,
yang_name="timeout",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint64",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """timeout must be of a type compatible with uint64""",
"defined-type": "uint64",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="timeout", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint64', is_config=False)""",
}
)
self.__timeout = t
if hasattr(self, "_set"):
self._set()
def _unset_timeout(self):
self.__timeout = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..18446744073709551615"]},
int_size=64,
),
is_leaf=True,
yang_name="timeout",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint64",
is_config=False,
)
def _get_include(self):
"""
Getter method for include, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/global/timers/max_metric/state/include (identityref)
YANG Description: By default, the maximum metric is advertised for all
non-stub interfaces of a device. When identities are
specified within this leaf-list, additional entities
are also advertised with the maximum metric according
to the values within the list.
"""
return self.__include
def _set_include(self, v, load=False):
"""
Setter method for include, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/global/timers/max_metric/state/include (identityref)
If this variable is read-only (config: false) in the
source YANG file, then _set_include is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_include() directly.
YANG Description: By default, the maximum metric is advertised for all
non-stub interfaces of a device. When identities are
specified within this leaf-list, additional entities
are also advertised with the maximum metric according
to the values within the list.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=TypedListType(
allowed_type=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"MAX_METRIC_INCLUDE_STUB": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospf-types:MAX_METRIC_INCLUDE_STUB": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospft:MAX_METRIC_INCLUDE_STUB": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"MAX_METRIC_INCLUDE_TYPE2_EXTERNAL": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospf-types:MAX_METRIC_INCLUDE_TYPE2_EXTERNAL": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospft:MAX_METRIC_INCLUDE_TYPE2_EXTERNAL": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
},
)
),
is_leaf=False,
yang_name="include",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="identityref",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """include must be of a type compatible with identityref""",
"defined-type": "openconfig-network-instance:identityref",
"generated-type": """YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'MAX_METRIC_INCLUDE_STUB': {'@module': 'openconfig-ospf-types', '@namespace': 'http://openconfig.net/yang/ospf-types'}, 'oc-ospf-types:MAX_METRIC_INCLUDE_STUB': {'@module': 'openconfig-ospf-types', '@namespace': 'http://openconfig.net/yang/ospf-types'}, 'oc-ospft:MAX_METRIC_INCLUDE_STUB': {'@module': 'openconfig-ospf-types', '@namespace': 'http://openconfig.net/yang/ospf-types'}, 'MAX_METRIC_INCLUDE_TYPE2_EXTERNAL': {'@module': 'openconfig-ospf-types', '@namespace': 'http://openconfig.net/yang/ospf-types'}, 'oc-ospf-types:MAX_METRIC_INCLUDE_TYPE2_EXTERNAL': {'@module': 'openconfig-ospf-types', '@namespace': 'http://openconfig.net/yang/ospf-types'}, 'oc-ospft:MAX_METRIC_INCLUDE_TYPE2_EXTERNAL': {'@module': 'openconfig-ospf-types', '@namespace': 'http://openconfig.net/yang/ospf-types'}},)), is_leaf=False, yang_name="include", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='identityref', is_config=False)""",
}
)
self.__include = t
if hasattr(self, "_set"):
self._set()
def _unset_include(self):
self.__include = YANGDynClass(
base=TypedListType(
allowed_type=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"MAX_METRIC_INCLUDE_STUB": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospf-types:MAX_METRIC_INCLUDE_STUB": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospft:MAX_METRIC_INCLUDE_STUB": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"MAX_METRIC_INCLUDE_TYPE2_EXTERNAL": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospf-types:MAX_METRIC_INCLUDE_TYPE2_EXTERNAL": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospft:MAX_METRIC_INCLUDE_TYPE2_EXTERNAL": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
},
)
),
is_leaf=False,
yang_name="include",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="identityref",
is_config=False,
)
def _get_trigger(self):
"""
Getter method for trigger, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/global/timers/max_metric/state/trigger (identityref)
YANG Description: By default, the maximum metric is only advertised
when the max-metric/set leaf is specified as true.
In the case that identities are specified within this
list, they provide additional triggers (e.g., system
boot) that may cause the max-metric to be set. In this
case, the system should still honour the timeout specified
by the max-metric/timeout leaf, and clear the max-metric
advertisements after the expiration of this timer.
"""
return self.__trigger
def _set_trigger(self, v, load=False):
"""
Setter method for trigger, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/global/timers/max_metric/state/trigger (identityref)
If this variable is read-only (config: false) in the
source YANG file, then _set_trigger is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_trigger() directly.
YANG Description: By default, the maximum metric is only advertised
when the max-metric/set leaf is specified as true.
In the case that identities are specified within this
list, they provide additional triggers (e.g., system
boot) that may cause the max-metric to be set. In this
case, the system should still honour the timeout specified
by the max-metric/timeout leaf, and clear the max-metric
advertisements after the expiration of this timer.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=TypedListType(
allowed_type=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"MAX_METRIC_ON_SYSTEM_BOOT": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospf-types:MAX_METRIC_ON_SYSTEM_BOOT": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospft:MAX_METRIC_ON_SYSTEM_BOOT": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
},
)
),
is_leaf=False,
yang_name="trigger",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="identityref",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """trigger must be of a type compatible with identityref""",
"defined-type": "openconfig-network-instance:identityref",
"generated-type": """YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'MAX_METRIC_ON_SYSTEM_BOOT': {'@module': 'openconfig-ospf-types', '@namespace': 'http://openconfig.net/yang/ospf-types'}, 'oc-ospf-types:MAX_METRIC_ON_SYSTEM_BOOT': {'@module': 'openconfig-ospf-types', '@namespace': 'http://openconfig.net/yang/ospf-types'}, 'oc-ospft:MAX_METRIC_ON_SYSTEM_BOOT': {'@module': 'openconfig-ospf-types', '@namespace': 'http://openconfig.net/yang/ospf-types'}},)), is_leaf=False, yang_name="trigger", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='identityref', is_config=False)""",
}
)
self.__trigger = t
if hasattr(self, "_set"):
self._set()
def _unset_trigger(self):
self.__trigger = YANGDynClass(
base=TypedListType(
allowed_type=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"MAX_METRIC_ON_SYSTEM_BOOT": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospf-types:MAX_METRIC_ON_SYSTEM_BOOT": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
"oc-ospft:MAX_METRIC_ON_SYSTEM_BOOT": {
"@module": "openconfig-ospf-types",
"@namespace": "http://openconfig.net/yang/ospf-types",
},
},
)
),
is_leaf=False,
yang_name="trigger",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="identityref",
is_config=False,
)
# Read-only property wrappers for the state leaves: these are config-false
# (operational state) nodes, so only getters are attached — writes must go
# through the private _set_* methods.
set_ = __builtin__.property(_get_set_)
timeout = __builtin__.property(_get_timeout)
include = __builtin__.property(_get_include)
trigger = __builtin__.property(_get_trigger)
# Ordered registry of this container's child elements, used by pyangbind's
# serialization and the copy-constructor logic.
_pyangbind_elements = OrderedDict(
    [
        ("set_", set_),
        ("timeout", timeout),
        ("include", include),
        ("trigger", trigger),
    ]
)
| 46.465414
| 1,256
| 0.556285
| 5,346
| 54,411
| 5.454545
| 0.047138
| 0.051852
| 0.08203
| 0.09273
| 0.98714
| 0.979698
| 0.979698
| 0.979698
| 0.979698
| 0.979698
| 0
| 0.007796
| 0.339913
| 54,411
| 1,170
| 1,257
| 46.505128
| 0.804098
| 0.189576
| 0
| 0.832244
| 0
| 0.008715
| 0.345171
| 0.15442
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030501
| false
| 0
| 0.01634
| 0
| 0.079521
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
dd4bfe6ab18e48a9f82bec85bbc8b4e548b7d05d
| 101
|
py
|
Python
|
veros/setups/global_flexible/__init__.py
|
AkasDutta/veros
|
9f530596a0148a398829050017de3e01a71261a0
|
[
"MIT"
] | 115
|
2019-11-23T02:31:30.000Z
|
2022-03-29T12:58:30.000Z
|
veros/setups/global_flexible/__init__.py
|
AkasDutta/veros
|
9f530596a0148a398829050017de3e01a71261a0
|
[
"MIT"
] | 207
|
2019-11-21T13:21:22.000Z
|
2022-03-31T23:36:09.000Z
|
veros/setups/global_flexible/__init__.py
|
AkasDutta/veros
|
9f530596a0148a398829050017de3e01a71261a0
|
[
"MIT"
] | 21
|
2020-01-28T13:13:39.000Z
|
2022-02-02T13:46:33.000Z
|
from veros.setups.global_flexible.global_flexible import GlobalFlexibleResolutionSetup # noqa: F401
| 50.5
| 100
| 0.871287
| 11
| 101
| 7.818182
| 0.818182
| 0.325581
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.032258
| 0.079208
| 101
| 1
| 101
| 101
| 0.892473
| 0.09901
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
660af869b370530ef30d6c8acfd09ec07324f139
| 2,022
|
py
|
Python
|
predict-py.py
|
kristinepetrosyan/spamemailclassify
|
a0998958a7069b301e337cf1982cbac618420eba
|
[
"MIT"
] | null | null | null |
predict-py.py
|
kristinepetrosyan/spamemailclassify
|
a0998958a7069b301e337cf1982cbac618420eba
|
[
"MIT"
] | null | null | null |
predict-py.py
|
kristinepetrosyan/spamemailclassify
|
a0998958a7069b301e337cf1982cbac618420eba
|
[
"MIT"
] | null | null | null |
{"cells":[{"metadata":{"_uuid":"9675078f-2f01-4e9e-8d52-4f1bf7e90718","_cell_guid":"2b3a4220-20af-4ba3-89bd-7a1f7fffd9aa","trusted":true},"cell_type":"code","source":"from joblib import load\nfrom preprocess import prep_data\nimport time\nimport os\n\n\n###############################################################################\n# MODEL PREDICTIONS #\n###############################################################################\n\ndef predict(s):\n\n X_string = prep_data(s)\n\n # loading the model\n spam_classifier = load(os.path.join(\"model\", \"svm_spam_classifier.joblib\"))\n\n # predicting\n spam_classifier_pred = spam_classifier.predict(X_string[\"clean_text\"])\n\n if spam_classifier_pred == 1:\n result = \"Spam\"\n else:\n result = \"Not Spam\"\n\n return result\n\n\n###############################################################################\n# MAIN #\n###############################################################################\n\nif __name__ == \"__main__\":\n\n t = time.time()\n string = \"\"\"We have filled the role and unfortunately, we will not be moving forward with your candidacy. Thank you again for your application, and time. We are a fast growing company, and our roles are constantly changing. Watch our job openings at Careers for future opportunities.\n We wish you the best in your endeavors. \"\"\"\n print(string)\n print(predict(string))\n print(f\"Preprocessing Time: {time.time() - t} seconds\")","execution_count":null,"outputs":[]}],"metadata":{"kernelspec":{"language":"python","display_name":"Python 3","name":"python3"},"language_info":{"pygments_lexer":"ipython3","nbconvert_exporter":"python","version":"3.6.4","file_extension":".py","codemirror_mode":{"name":"ipython","version":3},"name":"python","mimetype":"text/x-python"}},"nbformat":4,"nbformat_minor":4}
| 2,022
| 2,022
| 0.541543
| 236
| 2,022
| 4.5
| 0.550847
| 0.026365
| 0.011299
| 0.007533
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028655
| 0.154303
| 2,022
| 1
| 2,022
| 2,022
| 0.592398
| 0
| 0
| 0
| 0
| 2
| 0.693524
| 0.242709
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 9
|
66148a4106865da76f6dea65d4ecc0762716bb5e
| 17,047
|
py
|
Python
|
python_code/tiny_tile_server/python_wmts.py
|
jharpster/WMTS
|
59768683ea96cdf3e5520e3209f2edd2644d1bd4
|
[
"Unlicense"
] | 4
|
2016-07-29T08:00:44.000Z
|
2019-11-29T16:08:20.000Z
|
python_code/tiny_tile_server/python_wmts.py
|
carmencampos/WMTS
|
59768683ea96cdf3e5520e3209f2edd2644d1bd4
|
[
"Unlicense"
] | null | null | null |
python_code/tiny_tile_server/python_wmts.py
|
carmencampos/WMTS
|
59768683ea96cdf3e5520e3209f2edd2644d1bd4
|
[
"Unlicense"
] | null | null | null |
import bottle
import python_server
from python_server import *
python_wmts = bottle.Bottle()
def get_tile_wmts(mylayer, x, y, z, ext):
mercator = GlobalMercator()
layer = mylayer
mytitle = title
config = config_url[0]
TileMatrix = z
TileCol = x
TileRow = y
global mymaps
mymaps = maps()
m = mymaps[0]
basename = m['basename']
profile = m['profile']
bounds = m['bounds']
format = m['format']
mime = 'image/jpeg' if (format == 'jpg') else 'image/png'
if (profile == 'geodetic'):
tileMatrixSet = "WGS84"
else:
tileMatrixSet = "GoogleMapsCompatible"
a = bounds[1]
b = bounds[0]
c = bounds[3]
d = bounds[2]
(minx, miny) = mercator.LatLonToMeters(float(a), float(b))
(maxx, maxy) = mercator.LatLonToMeters(float(c), float(d))
bounds3857 = [minx, miny, maxx, maxy]
# Here is described and created the getCapabilities information in XML
# Following its specification
# http://www.opengeospatial.org/standards/wmts
bottle.response.content_type = "application/xml"
return """
<Capabilities xmlns="http://www.opengis.net/wmts/1.0" xmlns:ows="http://www.opengis.net/ows/1.1" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:gml="http://www.opengis.net/gml" xsi:schemaLocation="http://www.opengis.net/wmts/1.0 http://schemas.opengis.net/wmts/1.0/wmtsGetCapabilities_response.xsd" version="1.0.0">
<!-- Service Identification -->
<ows:ServiceIdentification>
<ows:Title>"""+ mytitle +"""</ows:Title>
<ows:ServiceType>OGC WMTS</ows:ServiceType>
<ows:ServiceTypeVersion>1.0.0</ows:ServiceTypeVersion>
</ows:ServiceIdentification>
<!-- Operations Metadata -->
<ows:OperationsMetadata>
<ows:Operation name="GetCapabilities">
<ows:DCP>
<ows:HTTP>
<ows:Get xlink:href='"""+ config +"""wmts/1.0.0/WMTSCapabilities.xml'>
<ows:Constraint name="GetEncoding">
<ows:AllowedValues>
<ows:Value>RESTful</ows:Value>
</ows:AllowedValues>
</ows:Constraint>
</ows:Get>
<!-- Add KVP binding in 10.1 -->
<ows:Get xlink:href='"""+ config +"""wmts?'>
<ows:Constraint name="GetEncoding">
<ows:AllowedValues>
<ows:Value>KVP</ows:Value>
</ows:AllowedValues>
</ows:Constraint>
</ows:Get>
</ows:HTTP>
</ows:DCP>
</ows:Operation>
<ows:Operation name="GetTile">
<ows:DCP>
<ows:HTTP>
<ows:Get xlink:href='"""+ config +"""api/tile/"""+ basename +"""'>
<ows:Constraint name="GetEncoding">
<ows:AllowedValues>
<ows:Value>RESTful</ows:Value>
</ows:AllowedValues>
</ows:Constraint>
</ows:Get>
<ows:Get xlink:href='"""+ config +"""api/tile/"""+ basename +"""'>
<ows:Constraint name="GetEncoding">
<ows:AllowedValues>
<ows:Value>KVP</ows:Value>
</ows:AllowedValues>
</ows:Constraint>
</ows:Get>
</ows:HTTP>
</ows:DCP>
</ows:Operation>
</ows:OperationsMetadata>
<Contents>
<Layer>
<ows:Title>"""+ basename +"""</ows:Title>
<ows:Identifier>"""+ basename +"""</ows:Identifier>
<ows:WGS84BoundingBox crs="urn:ogc:def:crs:OGC:2:84">
<ows:LowerCorner>"""+ bounds[0] + ' ' + bounds[1] +"""</ows:LowerCorner>
<ows:UpperCorner>"""+ bounds[2] + ' ' + bounds[3] +"""</ows:UpperCorner>
</ows:WGS84BoundingBox>
<Style isDefault="true">
<ows:Identifier>default</ows:Identifier>
</Style>
<Format>"""+ mime +"""</Format>
<TileMatrixSetLink>
<TileMatrixSet>"""+ tileMatrixSet +"""</TileMatrixSet>
</TileMatrixSetLink>
<ResourceURL format="mime" resourceType="tile" template='"""+ config +"""api/tile/"""+ basename +"""/"""+ TileMatrix +"""/"""+ TileCol +"""/"""+ TileRow +"""."""+ format +"""'/>
</Layer>
<!--TileMatrixSet-->
<TileMatrixSet>
<ows:Title>GoogleMapsCompatible</ows:Title>
<ows:Abstract>'GoogleMapsCompatible' tile matrix set defined by OGC WMTS specification</ows:Abstract>
<ows:Identifier>GoogleMapsCompatible</ows:Identifier>
<ows:SupportedCRS>urn:ogc:def:crs:EPSG:6.18:3:3857</ows:SupportedCRS>
<WellKnownScaleSet>urn:ogc:def:wkss:OGC:1.0:GoogleMapsCompatible</WellKnownScaleSet>
<TileMatrix>
<ows:Identifier>0</ows:Identifier>
<ScaleDenominator>559082264.0287178</ScaleDenominator>
<TopLeftCorner>-20037508.34278925 20037508.34278925</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>1</MatrixWidth>
<MatrixHeight>1</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>1</ows:Identifier>
<ScaleDenominator>279541132.0143589</ScaleDenominator>
<TopLeftCorner>-20037508.34278925 20037508.34278925</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>2</MatrixWidth>
<MatrixHeight>2</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>2</ows:Identifier>
<ScaleDenominator>139770566.0071794</ScaleDenominator>
<TopLeftCorner>-20037508.34278925 20037508.34278925</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>4</MatrixWidth>
<MatrixHeight>4</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>3</ows:Identifier>
<ScaleDenominator>69885283.00358972</ScaleDenominator>
<TopLeftCorner>-20037508.34278925 20037508.34278925</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>8</MatrixWidth>
<MatrixHeight>8</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>4</ows:Identifier>
<ScaleDenominator>34942641.50179486</ScaleDenominator>
<TopLeftCorner>-20037508.34278925 20037508.34278925</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>16</MatrixWidth>
<MatrixHeight>16</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>5</ows:Identifier>
<ScaleDenominator>17471320.75089743</ScaleDenominator>
<TopLeftCorner>-20037508.34278925 20037508.34278925</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>32</MatrixWidth>
<MatrixHeight>32</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>6</ows:Identifier>
<ScaleDenominator>8735660.375448715</ScaleDenominator>
<TopLeftCorner>-20037508.34278925 20037508.34278925</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>64</MatrixWidth>
<MatrixHeight>64</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>7</ows:Identifier>
<ScaleDenominator>4367830.187724357</ScaleDenominator>
<TopLeftCorner>-20037508.34278925 20037508.34278925</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>128</MatrixWidth>
<MatrixHeight>128</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>8</ows:Identifier>
<ScaleDenominator>2183915.093862179</ScaleDenominator>
<TopLeftCorner>-20037508.34278925 20037508.34278925</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>256</MatrixWidth>
<MatrixHeight>256</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>9</ows:Identifier>
<ScaleDenominator>1091957.546931089</ScaleDenominator>
<TopLeftCorner>-20037508.34278925 20037508.34278925</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>512</MatrixWidth>
<MatrixHeight>512</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>10</ows:Identifier>
<ScaleDenominator>545978.7734655447</ScaleDenominator>
<TopLeftCorner>-20037508.34278925 20037508.34278925</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>1024</MatrixWidth>
<MatrixHeight>1024</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>11</ows:Identifier>
<ScaleDenominator>272989.3867327723</ScaleDenominator>
<TopLeftCorner>-20037508.34278925 20037508.34278925</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>2048</MatrixWidth>
<MatrixHeight>2048</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>12</ows:Identifier>
<ScaleDenominator>136494.6933663862</ScaleDenominator>
<TopLeftCorner>-20037508.34278925 20037508.34278925</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>4096</MatrixWidth>
<MatrixHeight>4096</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>13</ows:Identifier>
<ScaleDenominator>68247.34668319309</ScaleDenominator>
<TopLeftCorner>-20037508.34278925 20037508.34278925</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>8192</MatrixWidth>
<MatrixHeight>8192</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>14</ows:Identifier>
<ScaleDenominator>34123.67334159654</ScaleDenominator>
<TopLeftCorner>-20037508.34278925 20037508.34278925</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>16384</MatrixWidth>
<MatrixHeight>16384</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>15</ows:Identifier>
<ScaleDenominator>17061.83667079827</ScaleDenominator>
<TopLeftCorner>-20037508.34278925 20037508.34278925</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>32768</MatrixWidth>
<MatrixHeight>32768</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>16</ows:Identifier>
<ScaleDenominator>8530.918335399136</ScaleDenominator>
<TopLeftCorner>-20037508.34278925 20037508.34278925</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>65536</MatrixWidth>
<MatrixHeight>65536</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>17</ows:Identifier>
<ScaleDenominator>4265.459167699568</ScaleDenominator>
<TopLeftCorner>-20037508.34278925 20037508.34278925</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>131072</MatrixWidth>
<MatrixHeight>131072</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>18</ows:Identifier>
<ScaleDenominator>2132.729583849784</ScaleDenominator>
<TopLeftCorner>-20037508.34278925 20037508.34278925</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>262144</MatrixWidth>
<MatrixHeight>262144</MatrixHeight>
</TileMatrix>
</TileMatrixSet>
<TileMatrixSet>
<ows:Identifier>WGS84</ows:Identifier>
<ows:Title>GoogleCRS84Quad</ows:Title>
<ows:SupportedCRS>urn:ogc:def:crs:EPSG:6.3:4326</ows:SupportedCRS>
<ows:BoundingBox crs="urn:ogc:def:crs:EPSG:6.3:4326">
<LowerCorner>-180.000000 -90.000000</LowerCorner>
<UpperCorner>180.000000 90.000000</UpperCorner>
</ows:BoundingBox>
<WellKnownScaleSet>urn:ogc:def:wkss:OGC:1.0:GoogleCRS84Quad</WellKnownScaleSet>
<TileMatrix>
<ows:Identifier>0</ows:Identifier>
<ScaleDenominator>279541132.01435887813568115234</ScaleDenominator>
<TopLeftCorner>90.000000 -180.000000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>2</MatrixWidth>
<MatrixHeight>1</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>1</ows:Identifier>
<ScaleDenominator>139770566.00717943906784057617</ScaleDenominator>
<TopLeftCorner>90.000000 -180.000000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>4</MatrixWidth>
<MatrixHeight>2</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>2</ows:Identifier>
<ScaleDenominator>69885283.00358971953392028809</ScaleDenominator>
<TopLeftCorner>90.000000 -180.000000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>8</MatrixWidth>
<MatrixHeight>4</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>3</ows:Identifier>
<ScaleDenominator>34942641.50179485976696014404</ScaleDenominator>
<TopLeftCorner>90.000000 -180.000000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>16</MatrixWidth>
<MatrixHeight>8</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>4</ows:Identifier>
<ScaleDenominator>17471320.75089742988348007202</ScaleDenominator>
<TopLeftCorner>90.000000 -180.000000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>32</MatrixWidth>
<MatrixHeight>16</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>5</ows:Identifier>
<ScaleDenominator>8735660.37544871494174003601</ScaleDenominator>
<TopLeftCorner>90.000000 -180.000000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>64</MatrixWidth>
<MatrixHeight>32</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>6</ows:Identifier>
<ScaleDenominator>4367830.18772435747087001801</ScaleDenominator>
<TopLeftCorner>90.000000 -180.000000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>128</MatrixWidth>
<MatrixHeight>64</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>7</ows:Identifier>
<ScaleDenominator>2183915.09386217873543500900</ScaleDenominator>
<TopLeftCorner>90.000000 -180.000000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>256</MatrixWidth>
<MatrixHeight>128</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>8</ows:Identifier>
<ScaleDenominator>1091957.54693108936771750450</ScaleDenominator>
<TopLeftCorner>90.000000 -180.000000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>512</MatrixWidth>
<MatrixHeight>256</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>9</ows:Identifier>
<ScaleDenominator>545978.77346554468385875225</ScaleDenominator>
<TopLeftCorner>90.000000 -180.000000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>1024</MatrixWidth>
<MatrixHeight>512</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>10</ows:Identifier>
<ScaleDenominator>272989.38673277234192937613</ScaleDenominator>
<TopLeftCorner>90.000000 -180.000000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>2048</MatrixWidth>
<MatrixHeight>1024</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>11</ows:Identifier>
<ScaleDenominator>136494.69336638617096468806</ScaleDenominator>
<TopLeftCorner>90.000000 -180.000000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>4096</MatrixWidth>
<MatrixHeight>2048</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>12</ows:Identifier>
<ScaleDenominator>68247.34668319308548234403</ScaleDenominator>
<TopLeftCorner>90.000000 -180.000000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>8192</MatrixWidth>
<MatrixHeight>4096</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>13</ows:Identifier>
<ScaleDenominator>34123.67334159654274117202</ScaleDenominator>
<TopLeftCorner>90.000000 -180.000000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>16384</MatrixWidth>
<MatrixHeight>8192</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>14</ows:Identifier>
<ScaleDenominator>17061.83667079825318069197</ScaleDenominator>
<TopLeftCorner>90.000000 -180.000000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>32768</MatrixWidth>
<MatrixHeight>16384</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>15</ows:Identifier>
<ScaleDenominator>8530.91833539912659034599</ScaleDenominator>
<TopLeftCorner>90.000000 -180.000000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>65536</MatrixWidth>
<MatrixHeight>32768</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>16</ows:Identifier>
<ScaleDenominator>4265.45916769956329517299</ScaleDenominator>
<TopLeftCorner>90.000000 -180.000000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>131072</MatrixWidth>
<MatrixHeight>65536</MatrixHeight>
</TileMatrix>
<TileMatrix>
<ows:Identifier>17</ows:Identifier>
<ScaleDenominator>2132.72958384978574031265</ScaleDenominator>
<TopLeftCorner>90.000000 -180.000000</TopLeftCorner>
<TileWidth>256</TileWidth>
<TileHeight>256</TileHeight>
<MatrixWidth>262144</MatrixWidth>
<MatrixHeight>131072</MatrixHeight>
</TileMatrix>
</TileMatrixSet>
</Contents>
<ServiceMetadataURL xlink:href='"""+ config +"""wmts/1.0.0/WMTSCapabilities.xml'/>
</Capabilities>"""
| 36.270213
| 373
| 0.720185
| 1,628
| 17,047
| 7.536241
| 0.152948
| 0.086886
| 0.069362
| 0.102535
| 0.755481
| 0.746271
| 0.744233
| 0.740484
| 0.715951
| 0.706659
| 0
| 0.156439
| 0.126298
| 17,047
| 470
| 374
| 36.270213
| 0.667316
| 0.008271
| 0
| 0.731868
| 0
| 0.013187
| 0.931369
| 0.656964
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002198
| false
| 0
| 0.006593
| 0
| 0.010989
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b07bf14d29a94269f7b6cba71bb6d34bcd7884cb
| 525,118
|
py
|
Python
|
src/test/resources/nsgConvertedModel/main_script.py
|
openworm/org.geppetto.simulator.external
|
97313038fba475bedc80fe8e52c4db3c9994d417
|
[
"MIT"
] | 2
|
2016-12-12T12:33:56.000Z
|
2021-03-06T16:30:24.000Z
|
src/test/resources/nsgConvertedModel/main_script.py
|
openworm/org.geppetto.simulator.external
|
97313038fba475bedc80fe8e52c4db3c9994d417
|
[
"MIT"
] | 17
|
2015-02-25T19:33:10.000Z
|
2021-03-24T17:31:52.000Z
|
src/test/resources/nsgConvertedModel/main_script.py
|
openworm/org.geppetto.simulator.external
|
97313038fba475bedc80fe8e52c4db3c9994d417
|
[
"MIT"
] | null | null | null |
'''
Neuron simulator export for:
Components:
network_ACnet2 (Type: networkWithTemperature: temperature=279.45 (SI temperature))
sim1 (Type: Simulation: length=0.3 (SI time) step=1.0E-5 (SI time))
This NEURON file has been generated by org.neuroml.export (see https://github.com/NeuroML/org.neuroml.export)
org.neuroml.export v1.4.4
org.neuroml.model v1.4.4
jLEMS v0.9.8.4
'''
import neuron
import time
h = neuron.h
h.load_file("stdlib.hoc")
h.load_file("stdgui.hoc")
h("objref p")
h("p = new PythonObject()")
# Adding simulation Component(id=sim1 type=Simulation) of network/component: network_ACnet2 (Type: networkWithTemperature: temperature=279.45 (SI temperature))
# Temperature used for network: 279.45 K
h.celsius = 279.45 - 273.15
print("Population pyramidals_48 contains 48 instance(s) of component: pyr_4_sym of type: cell")
print("Setting the default initial concentrations for ca (used in pyr_4_sym) to 5.0E-5 mM (internal), 3.0 mM (external)")
h("cai0_ca_ion = 5.0E-5")
h("cao0_ca_ion = 3.0")
h.load_file("pyr_4_sym.hoc")
a_pyramidals_48 = []
h("n_pyramidals_48 = 48")
h("objectvar a_pyramidals_48[n_pyramidals_48]")
for i in range(int(h.n_pyramidals_48)):
h("a_pyramidals_48[%i] = new pyr_4_sym()"%i)
h("access a_pyramidals_48[%i].soma"%i)
h("a_pyramidals_48[0].position(64.2564, 0.68375766, 94.83054)")
h("a_pyramidals_48[1].position(384.13205, 31.320269, 105.99679)")
h("a_pyramidals_48[2].position(176.90724, 31.218168, 84.67269)")
h("a_pyramidals_48[3].position(254.302, 31.820261, 345.84885)")
h("a_pyramidals_48[4].position(233.95702, 31.835752, 451.20184)")
h("a_pyramidals_48[5].position(209.1071, 28.383997, 130.2079)")
h("a_pyramidals_48[6].position(92.42472, 43.18615, 201.11111)")
h("a_pyramidals_48[7].position(377.63718, 4.9532323, 102.319244)")
h("a_pyramidals_48[8].position(226.27583, 14.309091, 12.907714)")
h("a_pyramidals_48[9].position(120.41029, 30.946571, 38.68389)")
h("a_pyramidals_48[10].position(29.497803, 19.15662, 7.760048)")
h("a_pyramidals_48[11].position(441.84177, 24.596628, 431.78842)")
h("a_pyramidals_48[12].position(80.34852, 11.467603, 259.14386)")
h("a_pyramidals_48[13].position(293.62384, 26.579428, 88.35277)")
h("a_pyramidals_48[14].position(372.12997, 12.358931, 104.25025)")
h("a_pyramidals_48[15].position(154.9465, 1.1012793, 337.73508)")
h("a_pyramidals_48[17].position(149.21432, 2.4104266, 211.67845)")
h("a_pyramidals_48[16].position(42.568623, 24.733513, 160.91415)")
h("a_pyramidals_48[19].position(116.32267, 0.893867, 210.1003)")
h("a_pyramidals_48[18].position(370.8707, 20.354103, 261.4979)")
h("a_pyramidals_48[21].position(296.66055, 22.536308, 188.89275)")
h("a_pyramidals_48[20].position(340.24274, 37.029236, 414.92313)")
h("a_pyramidals_48[23].position(183.67686, 8.052197, 366.93832)")
h("a_pyramidals_48[22].position(455.41083, 33.41776, 391.59827)")
h("a_pyramidals_48[25].position(155.74449, 6.9379034, 381.37143)")
h("a_pyramidals_48[24].position(269.1906, 22.978321, 74.24378)")
h("a_pyramidals_48[27].position(141.85143, 13.045913, 455.24518)")
h("a_pyramidals_48[26].position(79.68346, 7.487023, 4.2252245)")
h("a_pyramidals_48[29].position(202.82713, 47.069958, 221.71712)")
h("a_pyramidals_48[28].position(349.232, 22.139315, 82.177666)")
h("a_pyramidals_48[31].position(114.604294, 22.551754, 189.43771)")
h("a_pyramidals_48[30].position(180.02779, 4.136366, 85.817245)")
h("a_pyramidals_48[34].position(292.8651, 33.785324, 437.0381)")
h("a_pyramidals_48[35].position(277.8085, 22.897953, 419.0507)")
h("a_pyramidals_48[32].position(476.5766, 13.345018, 286.93124)")
h("a_pyramidals_48[33].position(222.39757, 19.97535, 179.3524)")
h("a_pyramidals_48[38].position(464.8008, 41.65847, 435.4519)")
h("a_pyramidals_48[39].position(342.59195, 11.596251, 385.38443)")
h("a_pyramidals_48[36].position(401.05313, 17.756813, 136.69092)")
h("a_pyramidals_48[37].position(111.605644, 14.521578, 98.30111)")
h("a_pyramidals_48[42].position(457.70523, 36.229473, 276.8031)")
h("a_pyramidals_48[43].position(410.01364, 0.4988253, 168.5312)")
h("a_pyramidals_48[40].position(98.5086, 24.474636, 238.42198)")
h("a_pyramidals_48[41].position(340.69547, 1.0447562, 433.7042)")
h("a_pyramidals_48[46].position(371.42322, 45.394405, 304.68243)")
h("a_pyramidals_48[47].position(233.55005, 22.25557, 245.82697)")
h("a_pyramidals_48[44].position(56.39708, 44.266815, 49.969612)")
h("a_pyramidals_48[45].position(104.02322, 11.90106, 66.28305)")
h("proc initialiseV_pyramidals_48() { for i = 0, n_pyramidals_48-1 { a_pyramidals_48[i].set_initial_v() } }")
h("objref fih_pyramidals_48")
h('{fih_pyramidals_48 = new FInitializeHandler(0, "initialiseV_pyramidals_48()")}')
h("proc initialiseIons_pyramidals_48() { for i = 0, n_pyramidals_48-1 { a_pyramidals_48[i].set_initial_ion_properties() } }")
h("objref fih_ion_pyramidals_48")
h('{fih_ion_pyramidals_48 = new FInitializeHandler(1, "initialiseIons_pyramidals_48()")}')
print("Population baskets_12 contains 12 instance(s) of component: bask of type: cell")
h.load_file("bask.hoc")
a_baskets_12 = []
h("n_baskets_12 = 12")
h("objectvar a_baskets_12[n_baskets_12]")
for i in range(int(h.n_baskets_12)):
h("a_baskets_12[%i] = new bask()"%i)
h("access a_baskets_12[%i].soma"%i)
h("a_baskets_12[0].position(372.5585, 75.342545, 459.21057)")
h("a_baskets_12[1].position(454.36914, 81.974014, 18.664598)")
h("a_baskets_12[2].position(74.28688, 98.43723, 496.8198)")
h("a_baskets_12[3].position(14.001072, 51.298218, 495.64935)")
h("a_baskets_12[4].position(104.659615, 96.06993, 361.19162)")
h("a_baskets_12[5].position(333.19098, 88.59382, 26.893526)")
h("a_baskets_12[6].position(198.22571, 70.067604, 211.09044)")
h("a_baskets_12[7].position(343.9047, 86.86429, 437.88196)")
h("a_baskets_12[8].position(339.1037, 96.47412, 354.17966)")
h("a_baskets_12[9].position(137.89124, 92.33597, 175.69292)")
h("a_baskets_12[10].position(52.321255, 93.11778, 447.05573)")
h("a_baskets_12[11].position(89.09762, 99.38623, 16.496897)")
h("proc initialiseV_baskets_12() { for i = 0, n_baskets_12-1 { a_baskets_12[i].set_initial_v() } }")
h("objref fih_baskets_12")
h('{fih_baskets_12 = new FInitializeHandler(0, "initialiseV_baskets_12()")}')
h("proc initialiseIons_baskets_12() { for i = 0, n_baskets_12-1 { a_baskets_12[i].set_initial_ion_properties() } }")
h("objref fih_ion_baskets_12")
h('{fih_ion_baskets_12 = new FInitializeHandler(1, "initialiseIons_baskets_12()")}')
# Adding projection: SmallNet_bask_bask, from baskets_12 to baskets_12 with synapse GABA_syn_inh, 60 connection(s)
h("objectvar syn_SmallNet_bask_bask_GABA_syn_inh[60]")
# Connection 0: 3, seg 0 (0.416606) -> 0, seg 0 (0.685771)
h("a_baskets_12[0].soma syn_SmallNet_bask_bask_GABA_syn_inh[0] = new GABA_syn_inh(0.685771)")
h("a_baskets_12[3].soma a_baskets_12[0].synlist.append(new NetCon(&v(0.41660583), syn_SmallNet_bask_bask_GABA_syn_inh[0], 0.0, 0.0, 1.0))")
# Connection 1: 3, seg 0 (0.787689) -> 0, seg 0 (0.340524)
h("a_baskets_12[0].soma syn_SmallNet_bask_bask_GABA_syn_inh[1] = new GABA_syn_inh(0.340524)")
h("a_baskets_12[3].soma a_baskets_12[0].synlist.append(new NetCon(&v(0.7876892), syn_SmallNet_bask_bask_GABA_syn_inh[1], 0.0, 0.0, 1.0))")
# Connection 2: 6, seg 0 (0.424612) -> 0, seg 0 (0.822448)
h("a_baskets_12[0].soma syn_SmallNet_bask_bask_GABA_syn_inh[2] = new GABA_syn_inh(0.822448)")
h("a_baskets_12[6].soma a_baskets_12[0].synlist.append(new NetCon(&v(0.42461205), syn_SmallNet_bask_bask_GABA_syn_inh[2], 0.0, 0.0, 1.0))")
# Connection 3: 11, seg 0 (0.950093) -> 0, seg 0 (0.314711)
h("a_baskets_12[0].soma syn_SmallNet_bask_bask_GABA_syn_inh[3] = new GABA_syn_inh(0.314711)")
h("a_baskets_12[11].soma a_baskets_12[0].synlist.append(new NetCon(&v(0.9500934), syn_SmallNet_bask_bask_GABA_syn_inh[3], 0.0, 0.0, 1.0))")
# Connection 4: 1, seg 0 (0.255018) -> 0, seg 0 (0.425168)
h("a_baskets_12[0].soma syn_SmallNet_bask_bask_GABA_syn_inh[4] = new GABA_syn_inh(0.425168)")
h("a_baskets_12[1].soma a_baskets_12[0].synlist.append(new NetCon(&v(0.2550183), syn_SmallNet_bask_bask_GABA_syn_inh[4], 0.0, 0.0, 1.0))")
# Connection 5: 6, seg 0 (0.624269) -> 1, seg 0 (0.473862)
h("a_baskets_12[1].soma syn_SmallNet_bask_bask_GABA_syn_inh[5] = new GABA_syn_inh(0.473862)")
h("a_baskets_12[6].soma a_baskets_12[1].synlist.append(new NetCon(&v(0.62426937), syn_SmallNet_bask_bask_GABA_syn_inh[5], 0.0, 0.0, 1.0))")
# Connection 6: 0, seg 0 (0.893419) -> 1, seg 0 (0.716105)
h("a_baskets_12[1].soma syn_SmallNet_bask_bask_GABA_syn_inh[6] = new GABA_syn_inh(0.716105)")
h("a_baskets_12[0].soma a_baskets_12[1].synlist.append(new NetCon(&v(0.8934188), syn_SmallNet_bask_bask_GABA_syn_inh[6], 0.0, 0.0, 1.0))")
# Connection 7: 2, seg 0 (0.759285) -> 1, seg 0 (0.604111)
h("a_baskets_12[1].soma syn_SmallNet_bask_bask_GABA_syn_inh[7] = new GABA_syn_inh(0.604111)")
h("a_baskets_12[2].soma a_baskets_12[1].synlist.append(new NetCon(&v(0.7592852), syn_SmallNet_bask_bask_GABA_syn_inh[7], 0.0, 0.0, 1.0))")
# Connection 8: 9, seg 0 (0.045865) -> 1, seg 0 (0.044528)
h("a_baskets_12[1].soma syn_SmallNet_bask_bask_GABA_syn_inh[8] = new GABA_syn_inh(0.044528)")
h("a_baskets_12[9].soma a_baskets_12[1].synlist.append(new NetCon(&v(0.045865357), syn_SmallNet_bask_bask_GABA_syn_inh[8], 0.0, 0.0, 1.0))")
# Connection 9: 2, seg 0 (0.622715) -> 1, seg 0 (0.889082)
h("a_baskets_12[1].soma syn_SmallNet_bask_bask_GABA_syn_inh[9] = new GABA_syn_inh(0.889082)")
h("a_baskets_12[2].soma a_baskets_12[1].synlist.append(new NetCon(&v(0.6227148), syn_SmallNet_bask_bask_GABA_syn_inh[9], 0.0, 0.0, 1.0))")
# Connection 10: 8, seg 0 (0.422401) -> 2, seg 0 (0.503136)
h("a_baskets_12[2].soma syn_SmallNet_bask_bask_GABA_syn_inh[10] = new GABA_syn_inh(0.503136)")
h("a_baskets_12[8].soma a_baskets_12[2].synlist.append(new NetCon(&v(0.42240143), syn_SmallNet_bask_bask_GABA_syn_inh[10], 0.0, 0.0, 1.0))")
# Connection 11: 3, seg 0 (0.225164) -> 2, seg 0 (0.112254)
h("a_baskets_12[2].soma syn_SmallNet_bask_bask_GABA_syn_inh[11] = new GABA_syn_inh(0.112254)")
h("a_baskets_12[3].soma a_baskets_12[2].synlist.append(new NetCon(&v(0.22516418), syn_SmallNet_bask_bask_GABA_syn_inh[11], 0.0, 0.0, 1.0))")
# Connection 12: 9, seg 0 (0.244167) -> 2, seg 0 (0.183087)
h("a_baskets_12[2].soma syn_SmallNet_bask_bask_GABA_syn_inh[12] = new GABA_syn_inh(0.183087)")
h("a_baskets_12[9].soma a_baskets_12[2].synlist.append(new NetCon(&v(0.24416727), syn_SmallNet_bask_bask_GABA_syn_inh[12], 0.0, 0.0, 1.0))")
# Connection 13: 7, seg 0 (0.945036) -> 2, seg 0 (0.133165)
h("a_baskets_12[2].soma syn_SmallNet_bask_bask_GABA_syn_inh[13] = new GABA_syn_inh(0.133165)")
h("a_baskets_12[7].soma a_baskets_12[2].synlist.append(new NetCon(&v(0.94503635), syn_SmallNet_bask_bask_GABA_syn_inh[13], 0.0, 0.0, 1.0))")
# Connection 14: 1, seg 0 (0.451799) -> 2, seg 0 (0.552482)
h("a_baskets_12[2].soma syn_SmallNet_bask_bask_GABA_syn_inh[14] = new GABA_syn_inh(0.552482)")
h("a_baskets_12[1].soma a_baskets_12[2].synlist.append(new NetCon(&v(0.45179862), syn_SmallNet_bask_bask_GABA_syn_inh[14], 0.0, 0.0, 1.0))")
# Connection 15: 9, seg 0 (0.245043) -> 3, seg 0 (0.102446)
h("a_baskets_12[3].soma syn_SmallNet_bask_bask_GABA_syn_inh[15] = new GABA_syn_inh(0.102446)")
h("a_baskets_12[9].soma a_baskets_12[3].synlist.append(new NetCon(&v(0.24504328), syn_SmallNet_bask_bask_GABA_syn_inh[15], 0.0, 0.0, 1.0))")
# Connection 16: 4, seg 0 (0.282358) -> 3, seg 0 (0.925143)
h("a_baskets_12[3].soma syn_SmallNet_bask_bask_GABA_syn_inh[16] = new GABA_syn_inh(0.925143)")
h("a_baskets_12[4].soma a_baskets_12[3].synlist.append(new NetCon(&v(0.28235823), syn_SmallNet_bask_bask_GABA_syn_inh[16], 0.0, 0.0, 1.0))")
# Connection 17: 10, seg 0 (0.922459) -> 3, seg 0 (0.283169)
h("a_baskets_12[3].soma syn_SmallNet_bask_bask_GABA_syn_inh[17] = new GABA_syn_inh(0.283169)")
h("a_baskets_12[10].soma a_baskets_12[3].synlist.append(new NetCon(&v(0.92245895), syn_SmallNet_bask_bask_GABA_syn_inh[17], 0.0, 0.0, 1.0))")
# Connection 18: 11, seg 0 (0.332195) -> 3, seg 0 (0.253767)
h("a_baskets_12[3].soma syn_SmallNet_bask_bask_GABA_syn_inh[18] = new GABA_syn_inh(0.253767)")
h("a_baskets_12[11].soma a_baskets_12[3].synlist.append(new NetCon(&v(0.3321947), syn_SmallNet_bask_bask_GABA_syn_inh[18], 0.0, 0.0, 1.0))")
# Connection 19: 1, seg 0 (0.982219) -> 3, seg 0 (0.604215)
h("a_baskets_12[3].soma syn_SmallNet_bask_bask_GABA_syn_inh[19] = new GABA_syn_inh(0.604215)")
h("a_baskets_12[1].soma a_baskets_12[3].synlist.append(new NetCon(&v(0.9822189), syn_SmallNet_bask_bask_GABA_syn_inh[19], 0.0, 0.0, 1.0))")
# Connection 20: 0, seg 0 (0.697187) -> 4, seg 0 (0.493495)
h("a_baskets_12[4].soma syn_SmallNet_bask_bask_GABA_syn_inh[20] = new GABA_syn_inh(0.493495)")
h("a_baskets_12[0].soma a_baskets_12[4].synlist.append(new NetCon(&v(0.69718724), syn_SmallNet_bask_bask_GABA_syn_inh[20], 0.0, 0.0, 1.0))")
# Connection 21: 11, seg 0 (0.501080) -> 4, seg 0 (0.295666)
h("a_baskets_12[4].soma syn_SmallNet_bask_bask_GABA_syn_inh[21] = new GABA_syn_inh(0.295666)")
h("a_baskets_12[11].soma a_baskets_12[4].synlist.append(new NetCon(&v(0.50108), syn_SmallNet_bask_bask_GABA_syn_inh[21], 0.0, 0.0, 1.0))")
# Connection 22: 1, seg 0 (0.649818) -> 4, seg 0 (0.332438)
h("a_baskets_12[4].soma syn_SmallNet_bask_bask_GABA_syn_inh[22] = new GABA_syn_inh(0.332438)")
h("a_baskets_12[1].soma a_baskets_12[4].synlist.append(new NetCon(&v(0.649818), syn_SmallNet_bask_bask_GABA_syn_inh[22], 0.0, 0.0, 1.0))")
# Connection 23: 11, seg 0 (0.047697) -> 4, seg 0 (0.226918)
h("a_baskets_12[4].soma syn_SmallNet_bask_bask_GABA_syn_inh[23] = new GABA_syn_inh(0.226918)")
h("a_baskets_12[11].soma a_baskets_12[4].synlist.append(new NetCon(&v(0.04769653), syn_SmallNet_bask_bask_GABA_syn_inh[23], 0.0, 0.0, 1.0))")
# Connection 24: 0, seg 0 (0.994105) -> 4, seg 0 (0.104700)
h("a_baskets_12[4].soma syn_SmallNet_bask_bask_GABA_syn_inh[24] = new GABA_syn_inh(0.104700)")
h("a_baskets_12[0].soma a_baskets_12[4].synlist.append(new NetCon(&v(0.994105), syn_SmallNet_bask_bask_GABA_syn_inh[24], 0.0, 0.0, 1.0))")
# Connection 25: 9, seg 0 (0.106531) -> 5, seg 0 (0.349218)
h("a_baskets_12[5].soma syn_SmallNet_bask_bask_GABA_syn_inh[25] = new GABA_syn_inh(0.349218)")
h("a_baskets_12[9].soma a_baskets_12[5].synlist.append(new NetCon(&v(0.10653067), syn_SmallNet_bask_bask_GABA_syn_inh[25], 0.0, 0.0, 1.0))")
# Connection 26: 2, seg 0 (0.728460) -> 5, seg 0 (0.274321)
h("a_baskets_12[5].soma syn_SmallNet_bask_bask_GABA_syn_inh[26] = new GABA_syn_inh(0.274321)")
h("a_baskets_12[2].soma a_baskets_12[5].synlist.append(new NetCon(&v(0.72845984), syn_SmallNet_bask_bask_GABA_syn_inh[26], 0.0, 0.0, 1.0))")
# Connection 27: 7, seg 0 (0.767730) -> 5, seg 0 (0.509782)
h("a_baskets_12[5].soma syn_SmallNet_bask_bask_GABA_syn_inh[27] = new GABA_syn_inh(0.509782)")
h("a_baskets_12[7].soma a_baskets_12[5].synlist.append(new NetCon(&v(0.76773024), syn_SmallNet_bask_bask_GABA_syn_inh[27], 0.0, 0.0, 1.0))")
# Connection 28: 0, seg 0 (0.464671) -> 5, seg 0 (0.238544)
h("a_baskets_12[5].soma syn_SmallNet_bask_bask_GABA_syn_inh[28] = new GABA_syn_inh(0.238544)")
h("a_baskets_12[0].soma a_baskets_12[5].synlist.append(new NetCon(&v(0.46467108), syn_SmallNet_bask_bask_GABA_syn_inh[28], 0.0, 0.0, 1.0))")
# Connection 29: 11, seg 0 (0.588630) -> 5, seg 0 (0.080469)
h("a_baskets_12[5].soma syn_SmallNet_bask_bask_GABA_syn_inh[29] = new GABA_syn_inh(0.080469)")
h("a_baskets_12[11].soma a_baskets_12[5].synlist.append(new NetCon(&v(0.5886299), syn_SmallNet_bask_bask_GABA_syn_inh[29], 0.0, 0.0, 1.0))")
# Connection 30: 9, seg 0 (0.197218) -> 6, seg 0 (0.901784)
h("a_baskets_12[6].soma syn_SmallNet_bask_bask_GABA_syn_inh[30] = new GABA_syn_inh(0.901784)")
h("a_baskets_12[9].soma a_baskets_12[6].synlist.append(new NetCon(&v(0.19721818), syn_SmallNet_bask_bask_GABA_syn_inh[30], 0.0, 0.0, 1.0))")
# Connection 31: 5, seg 0 (0.439046) -> 6, seg 0 (0.038998)
h("a_baskets_12[6].soma syn_SmallNet_bask_bask_GABA_syn_inh[31] = new GABA_syn_inh(0.038998)")
h("a_baskets_12[5].soma a_baskets_12[6].synlist.append(new NetCon(&v(0.43904626), syn_SmallNet_bask_bask_GABA_syn_inh[31], 0.0, 0.0, 1.0))")
# Connection 32: 11, seg 0 (0.946904) -> 6, seg 0 (0.301489)
h("a_baskets_12[6].soma syn_SmallNet_bask_bask_GABA_syn_inh[32] = new GABA_syn_inh(0.301489)")
h("a_baskets_12[11].soma a_baskets_12[6].synlist.append(new NetCon(&v(0.946904), syn_SmallNet_bask_bask_GABA_syn_inh[32], 0.0, 0.0, 1.0))")
# Connection 33: 0, seg 0 (0.231292) -> 6, seg 0 (0.356746)
h("a_baskets_12[6].soma syn_SmallNet_bask_bask_GABA_syn_inh[33] = new GABA_syn_inh(0.356746)")
h("a_baskets_12[0].soma a_baskets_12[6].synlist.append(new NetCon(&v(0.23129213), syn_SmallNet_bask_bask_GABA_syn_inh[33], 0.0, 0.0, 1.0))")
# Connection 34: 2, seg 0 (0.194122) -> 6, seg 0 (0.318762)
h("a_baskets_12[6].soma syn_SmallNet_bask_bask_GABA_syn_inh[34] = new GABA_syn_inh(0.318762)")
h("a_baskets_12[2].soma a_baskets_12[6].synlist.append(new NetCon(&v(0.19412202), syn_SmallNet_bask_bask_GABA_syn_inh[34], 0.0, 0.0, 1.0))")
# Connection 35: 5, seg 0 (0.633284) -> 7, seg 0 (0.030376)
h("a_baskets_12[7].soma syn_SmallNet_bask_bask_GABA_syn_inh[35] = new GABA_syn_inh(0.030376)")
h("a_baskets_12[5].soma a_baskets_12[7].synlist.append(new NetCon(&v(0.633284), syn_SmallNet_bask_bask_GABA_syn_inh[35], 0.0, 0.0, 1.0))")
# Connection 36: 8, seg 0 (0.304526) -> 7, seg 0 (0.097242)
h("a_baskets_12[7].soma syn_SmallNet_bask_bask_GABA_syn_inh[36] = new GABA_syn_inh(0.097242)")
h("a_baskets_12[8].soma a_baskets_12[7].synlist.append(new NetCon(&v(0.30452627), syn_SmallNet_bask_bask_GABA_syn_inh[36], 0.0, 0.0, 1.0))")
# Connection 37: 3, seg 0 (0.288910) -> 7, seg 0 (0.770081)
h("a_baskets_12[7].soma syn_SmallNet_bask_bask_GABA_syn_inh[37] = new GABA_syn_inh(0.770081)")
h("a_baskets_12[3].soma a_baskets_12[7].synlist.append(new NetCon(&v(0.2889096), syn_SmallNet_bask_bask_GABA_syn_inh[37], 0.0, 0.0, 1.0))")
# Connection 38: 0, seg 0 (0.079766) -> 7, seg 0 (0.024889)
h("a_baskets_12[7].soma syn_SmallNet_bask_bask_GABA_syn_inh[38] = new GABA_syn_inh(0.024889)")
h("a_baskets_12[0].soma a_baskets_12[7].synlist.append(new NetCon(&v(0.079765916), syn_SmallNet_bask_bask_GABA_syn_inh[38], 0.0, 0.0, 1.0))")
# Connection 39: 1, seg 0 (0.447629) -> 7, seg 0 (0.305819)
h("a_baskets_12[7].soma syn_SmallNet_bask_bask_GABA_syn_inh[39] = new GABA_syn_inh(0.305819)")
h("a_baskets_12[1].soma a_baskets_12[7].synlist.append(new NetCon(&v(0.44762868), syn_SmallNet_bask_bask_GABA_syn_inh[39], 0.0, 0.0, 1.0))")
# Connection 40: 10, seg 0 (0.513855) -> 8, seg 0 (0.083439)
h("a_baskets_12[8].soma syn_SmallNet_bask_bask_GABA_syn_inh[40] = new GABA_syn_inh(0.083439)")
h("a_baskets_12[10].soma a_baskets_12[8].synlist.append(new NetCon(&v(0.5138552), syn_SmallNet_bask_bask_GABA_syn_inh[40], 0.0, 0.0, 1.0))")
# Connection 41: 0, seg 0 (0.626924) -> 8, seg 0 (0.792099)
h("a_baskets_12[8].soma syn_SmallNet_bask_bask_GABA_syn_inh[41] = new GABA_syn_inh(0.792099)")
h("a_baskets_12[0].soma a_baskets_12[8].synlist.append(new NetCon(&v(0.62692386), syn_SmallNet_bask_bask_GABA_syn_inh[41], 0.0, 0.0, 1.0))")
# Connection 42: 9, seg 0 (0.792676) -> 8, seg 0 (0.375541)
h("a_baskets_12[8].soma syn_SmallNet_bask_bask_GABA_syn_inh[42] = new GABA_syn_inh(0.375541)")
h("a_baskets_12[9].soma a_baskets_12[8].synlist.append(new NetCon(&v(0.7926757), syn_SmallNet_bask_bask_GABA_syn_inh[42], 0.0, 0.0, 1.0))")
# Connection 43: 3, seg 0 (0.632300) -> 8, seg 0 (0.645316)
h("a_baskets_12[8].soma syn_SmallNet_bask_bask_GABA_syn_inh[43] = new GABA_syn_inh(0.645316)")
h("a_baskets_12[3].soma a_baskets_12[8].synlist.append(new NetCon(&v(0.6322999), syn_SmallNet_bask_bask_GABA_syn_inh[43], 0.0, 0.0, 1.0))")
# Connection 44: 0, seg 0 (0.375548) -> 8, seg 0 (0.141820)
h("a_baskets_12[8].soma syn_SmallNet_bask_bask_GABA_syn_inh[44] = new GABA_syn_inh(0.141820)")
h("a_baskets_12[0].soma a_baskets_12[8].synlist.append(new NetCon(&v(0.375548), syn_SmallNet_bask_bask_GABA_syn_inh[44], 0.0, 0.0, 1.0))")
# Connection 45: 4, seg 0 (0.224258) -> 9, seg 0 (0.380814)
h("a_baskets_12[9].soma syn_SmallNet_bask_bask_GABA_syn_inh[45] = new GABA_syn_inh(0.380814)")
h("a_baskets_12[4].soma a_baskets_12[9].synlist.append(new NetCon(&v(0.22425765), syn_SmallNet_bask_bask_GABA_syn_inh[45], 0.0, 0.0, 1.0))")
# Connection 46: 0, seg 0 (0.035965) -> 9, seg 0 (0.570749)
h("a_baskets_12[9].soma syn_SmallNet_bask_bask_GABA_syn_inh[46] = new GABA_syn_inh(0.570749)")
h("a_baskets_12[0].soma a_baskets_12[9].synlist.append(new NetCon(&v(0.035964966), syn_SmallNet_bask_bask_GABA_syn_inh[46], 0.0, 0.0, 1.0))")
# Connection 47: 2, seg 0 (0.366381) -> 9, seg 0 (0.333854)
h("a_baskets_12[9].soma syn_SmallNet_bask_bask_GABA_syn_inh[47] = new GABA_syn_inh(0.333854)")
h("a_baskets_12[2].soma a_baskets_12[9].synlist.append(new NetCon(&v(0.36638063), syn_SmallNet_bask_bask_GABA_syn_inh[47], 0.0, 0.0, 1.0))")
# Connection 48: 2, seg 0 (0.115292) -> 9, seg 0 (0.307239)
h("a_baskets_12[9].soma syn_SmallNet_bask_bask_GABA_syn_inh[48] = new GABA_syn_inh(0.307239)")
h("a_baskets_12[2].soma a_baskets_12[9].synlist.append(new NetCon(&v(0.11529195), syn_SmallNet_bask_bask_GABA_syn_inh[48], 0.0, 0.0, 1.0))")
# Connection 49: 5, seg 0 (0.283497) -> 9, seg 0 (0.310595)
h("a_baskets_12[9].soma syn_SmallNet_bask_bask_GABA_syn_inh[49] = new GABA_syn_inh(0.310595)")
h("a_baskets_12[5].soma a_baskets_12[9].synlist.append(new NetCon(&v(0.28349704), syn_SmallNet_bask_bask_GABA_syn_inh[49], 0.0, 0.0, 1.0))")
# Connection 50: 9, seg 0 (0.238150) -> 10, seg 0 (0.270978)
h("a_baskets_12[10].soma syn_SmallNet_bask_bask_GABA_syn_inh[50] = new GABA_syn_inh(0.270978)")
h("a_baskets_12[9].soma a_baskets_12[10].synlist.append(new NetCon(&v(0.23815024), syn_SmallNet_bask_bask_GABA_syn_inh[50], 0.0, 0.0, 1.0))")
# Connection 51: 11, seg 0 (0.470458) -> 10, seg 0 (0.078177)
h("a_baskets_12[10].soma syn_SmallNet_bask_bask_GABA_syn_inh[51] = new GABA_syn_inh(0.078177)")
h("a_baskets_12[11].soma a_baskets_12[10].synlist.append(new NetCon(&v(0.47045845), syn_SmallNet_bask_bask_GABA_syn_inh[51], 0.0, 0.0, 1.0))")
# Connection 52: 1, seg 0 (0.430757) -> 10, seg 0 (0.422194)
h("a_baskets_12[10].soma syn_SmallNet_bask_bask_GABA_syn_inh[52] = new GABA_syn_inh(0.422194)")
h("a_baskets_12[1].soma a_baskets_12[10].synlist.append(new NetCon(&v(0.43075705), syn_SmallNet_bask_bask_GABA_syn_inh[52], 0.0, 0.0, 1.0))")
# Connection 53: 1, seg 0 (0.267107) -> 10, seg 0 (0.504498)
h("a_baskets_12[10].soma syn_SmallNet_bask_bask_GABA_syn_inh[53] = new GABA_syn_inh(0.504498)")
h("a_baskets_12[1].soma a_baskets_12[10].synlist.append(new NetCon(&v(0.2671072), syn_SmallNet_bask_bask_GABA_syn_inh[53], 0.0, 0.0, 1.0))")
# Connection 54: 2, seg 0 (0.409871) -> 10, seg 0 (0.199592)
h("a_baskets_12[10].soma syn_SmallNet_bask_bask_GABA_syn_inh[54] = new GABA_syn_inh(0.199592)")
h("a_baskets_12[2].soma a_baskets_12[10].synlist.append(new NetCon(&v(0.4098711), syn_SmallNet_bask_bask_GABA_syn_inh[54], 0.0, 0.0, 1.0))")
# Connection 55: 2, seg 0 (0.629367) -> 11, seg 0 (0.682510)
h("a_baskets_12[11].soma syn_SmallNet_bask_bask_GABA_syn_inh[55] = new GABA_syn_inh(0.682510)")
h("a_baskets_12[2].soma a_baskets_12[11].synlist.append(new NetCon(&v(0.6293674), syn_SmallNet_bask_bask_GABA_syn_inh[55], 0.0, 0.0, 1.0))")
# Connection 56: 4, seg 0 (0.448295) -> 11, seg 0 (0.713540)
h("a_baskets_12[11].soma syn_SmallNet_bask_bask_GABA_syn_inh[56] = new GABA_syn_inh(0.713540)")
h("a_baskets_12[4].soma a_baskets_12[11].synlist.append(new NetCon(&v(0.4482953), syn_SmallNet_bask_bask_GABA_syn_inh[56], 0.0, 0.0, 1.0))")
# Connection 57: 10, seg 0 (0.140714) -> 11, seg 0 (0.320555)
h("a_baskets_12[11].soma syn_SmallNet_bask_bask_GABA_syn_inh[57] = new GABA_syn_inh(0.320555)")
h("a_baskets_12[10].soma a_baskets_12[11].synlist.append(new NetCon(&v(0.14071357), syn_SmallNet_bask_bask_GABA_syn_inh[57], 0.0, 0.0, 1.0))")
# Connection 58: 8, seg 0 (0.148640) -> 11, seg 0 (0.033758)
h("a_baskets_12[11].soma syn_SmallNet_bask_bask_GABA_syn_inh[58] = new GABA_syn_inh(0.033758)")
h("a_baskets_12[8].soma a_baskets_12[11].synlist.append(new NetCon(&v(0.14863956), syn_SmallNet_bask_bask_GABA_syn_inh[58], 0.0, 0.0, 1.0))")
# Connection 59: 10, seg 0 (0.797065) -> 11, seg 0 (0.116422)
h("a_baskets_12[11].soma syn_SmallNet_bask_bask_GABA_syn_inh[59] = new GABA_syn_inh(0.116422)")
h("a_baskets_12[10].soma a_baskets_12[11].synlist.append(new NetCon(&v(0.79706544), syn_SmallNet_bask_bask_GABA_syn_inh[59], 0.0, 0.0, 1.0))")
# Adding projection: SmallNet_bask_pyr, from baskets_12 to pyramidals_48 with synapse GABA_syn, 336 connection(s)
h("objectvar syn_SmallNet_bask_pyr_GABA_syn[336]")
# Connection 0: 10, seg 0 (0.058240) -> 0, seg 6 (0.026282)
h("a_pyramidals_48[0].basal0 syn_SmallNet_bask_pyr_GABA_syn[0] = new GABA_syn(0.026282)")
h("a_baskets_12[10].soma a_pyramidals_48[0].synlist.append(new NetCon(&v(0.058240294), syn_SmallNet_bask_pyr_GABA_syn[0], 0.0, 0.0, 1.0))")
# Connection 1: 9, seg 0 (0.099164) -> 0, seg 6 (0.959484)
h("a_pyramidals_48[0].basal0 syn_SmallNet_bask_pyr_GABA_syn[1] = new GABA_syn(0.959484)")
h("a_baskets_12[9].soma a_pyramidals_48[0].synlist.append(new NetCon(&v(0.09916419), syn_SmallNet_bask_pyr_GABA_syn[1], 0.0, 0.0, 1.0))")
# Connection 2: 7, seg 0 (0.179355) -> 0, seg 6 (0.155007)
h("a_pyramidals_48[0].basal0 syn_SmallNet_bask_pyr_GABA_syn[2] = new GABA_syn(0.155007)")
h("a_baskets_12[7].soma a_pyramidals_48[0].synlist.append(new NetCon(&v(0.17935467), syn_SmallNet_bask_pyr_GABA_syn[2], 0.0, 0.0, 1.0))")
# Connection 3: 4, seg 0 (0.711143) -> 0, seg 6 (0.221023)
h("a_pyramidals_48[0].basal0 syn_SmallNet_bask_pyr_GABA_syn[3] = new GABA_syn(0.221023)")
h("a_baskets_12[4].soma a_pyramidals_48[0].synlist.append(new NetCon(&v(0.711143), syn_SmallNet_bask_pyr_GABA_syn[3], 0.0, 0.0, 1.0))")
# Connection 4: 9, seg 0 (0.543666) -> 0, seg 6 (0.943817)
h("a_pyramidals_48[0].basal0 syn_SmallNet_bask_pyr_GABA_syn[4] = new GABA_syn(0.943817)")
h("a_baskets_12[9].soma a_pyramidals_48[0].synlist.append(new NetCon(&v(0.5436661), syn_SmallNet_bask_pyr_GABA_syn[4], 0.0, 0.0, 1.0))")
# --- Auto-generated NEURON network wiring: basket -> pyramidal GABAergic connections ---
# Each connection is realized by two HOC statements executed through h():
#   1) On the target pyramidal cell's basal0 section, create a GABA_syn point
#      process at the given normalized position (0..1 along the section).
#   2) On the source basket cell's soma, create a NetCon watching the membrane
#      potential &v(pos) and append it to the pyramidal cell's synlist; the
#      trailing NetCon arguments are (threshold=0.0, delay=0.0, weight=1.0).
# The index i in syn_SmallNet_bask_pyr_GABA_syn[i] is a running global counter
# over this projection. The "# Connection i: src, seg 0 (pos) -> tgt, seg 6 (pos)"
# comments record (basket index, source position) -> (pyramidal index, target
# position) as emitted by the generator.
# NOTE(review): this section is machine-generated — do not hand-edit the numeric
# literals (the mixed float precision is the generator's repr output); regenerate
# from the network description instead.
# Connection 5: 3, seg 0 (0.380063) -> 0, seg 6 (0.543829)
h("a_pyramidals_48[0].basal0 syn_SmallNet_bask_pyr_GABA_syn[5] = new GABA_syn(0.543829)")
h("a_baskets_12[3].soma a_pyramidals_48[0].synlist.append(new NetCon(&v(0.38006347), syn_SmallNet_bask_pyr_GABA_syn[5], 0.0, 0.0, 1.0))")
# Connection 6: 2, seg 0 (0.207300) -> 0, seg 6 (0.883827)
h("a_pyramidals_48[0].basal0 syn_SmallNet_bask_pyr_GABA_syn[6] = new GABA_syn(0.883827)")
h("a_baskets_12[2].soma a_pyramidals_48[0].synlist.append(new NetCon(&v(0.20729977), syn_SmallNet_bask_pyr_GABA_syn[6], 0.0, 0.0, 1.0))")
# Connection 7: 11, seg 0 (0.104828) -> 1, seg 6 (0.289440)
h("a_pyramidals_48[1].basal0 syn_SmallNet_bask_pyr_GABA_syn[7] = new GABA_syn(0.289440)")
h("a_baskets_12[11].soma a_pyramidals_48[1].synlist.append(new NetCon(&v(0.10482848), syn_SmallNet_bask_pyr_GABA_syn[7], 0.0, 0.0, 1.0))")
# Connection 8: 4, seg 0 (0.698590) -> 1, seg 6 (0.812063)
h("a_pyramidals_48[1].basal0 syn_SmallNet_bask_pyr_GABA_syn[8] = new GABA_syn(0.812063)")
h("a_baskets_12[4].soma a_pyramidals_48[1].synlist.append(new NetCon(&v(0.6985896), syn_SmallNet_bask_pyr_GABA_syn[8], 0.0, 0.0, 1.0))")
# Connection 9: 0, seg 0 (0.607545) -> 1, seg 6 (0.920258)
h("a_pyramidals_48[1].basal0 syn_SmallNet_bask_pyr_GABA_syn[9] = new GABA_syn(0.920258)")
h("a_baskets_12[0].soma a_pyramidals_48[1].synlist.append(new NetCon(&v(0.607545), syn_SmallNet_bask_pyr_GABA_syn[9], 0.0, 0.0, 1.0))")
# Connection 10: 9, seg 0 (0.078174) -> 1, seg 6 (0.516680)
h("a_pyramidals_48[1].basal0 syn_SmallNet_bask_pyr_GABA_syn[10] = new GABA_syn(0.516680)")
h("a_baskets_12[9].soma a_pyramidals_48[1].synlist.append(new NetCon(&v(0.07817376), syn_SmallNet_bask_pyr_GABA_syn[10], 0.0, 0.0, 1.0))")
# Connection 11: 7, seg 0 (0.705522) -> 1, seg 6 (0.078418)
h("a_pyramidals_48[1].basal0 syn_SmallNet_bask_pyr_GABA_syn[11] = new GABA_syn(0.078418)")
h("a_baskets_12[7].soma a_pyramidals_48[1].synlist.append(new NetCon(&v(0.7055223), syn_SmallNet_bask_pyr_GABA_syn[11], 0.0, 0.0, 1.0))")
# Connection 12: 9, seg 0 (0.668376) -> 1, seg 6 (0.634465)
h("a_pyramidals_48[1].basal0 syn_SmallNet_bask_pyr_GABA_syn[12] = new GABA_syn(0.634465)")
h("a_baskets_12[9].soma a_pyramidals_48[1].synlist.append(new NetCon(&v(0.6683763), syn_SmallNet_bask_pyr_GABA_syn[12], 0.0, 0.0, 1.0))")
# Connection 13: 4, seg 0 (0.082915) -> 1, seg 6 (0.028853)
h("a_pyramidals_48[1].basal0 syn_SmallNet_bask_pyr_GABA_syn[13] = new GABA_syn(0.028853)")
h("a_baskets_12[4].soma a_pyramidals_48[1].synlist.append(new NetCon(&v(0.082915485), syn_SmallNet_bask_pyr_GABA_syn[13], 0.0, 0.0, 1.0))")
# Connection 14: 6, seg 0 (0.985960) -> 2, seg 6 (0.049411)
h("a_pyramidals_48[2].basal0 syn_SmallNet_bask_pyr_GABA_syn[14] = new GABA_syn(0.049411)")
h("a_baskets_12[6].soma a_pyramidals_48[2].synlist.append(new NetCon(&v(0.9859604), syn_SmallNet_bask_pyr_GABA_syn[14], 0.0, 0.0, 1.0))")
# Connection 15: 7, seg 0 (0.197669) -> 2, seg 6 (0.097334)
h("a_pyramidals_48[2].basal0 syn_SmallNet_bask_pyr_GABA_syn[15] = new GABA_syn(0.097334)")
h("a_baskets_12[7].soma a_pyramidals_48[2].synlist.append(new NetCon(&v(0.19766873), syn_SmallNet_bask_pyr_GABA_syn[15], 0.0, 0.0, 1.0))")
# Connection 16: 2, seg 0 (0.072637) -> 2, seg 6 (0.748637)
h("a_pyramidals_48[2].basal0 syn_SmallNet_bask_pyr_GABA_syn[16] = new GABA_syn(0.748637)")
h("a_baskets_12[2].soma a_pyramidals_48[2].synlist.append(new NetCon(&v(0.07263738), syn_SmallNet_bask_pyr_GABA_syn[16], 0.0, 0.0, 1.0))")
# Connection 17: 8, seg 0 (0.958698) -> 2, seg 6 (0.037808)
h("a_pyramidals_48[2].basal0 syn_SmallNet_bask_pyr_GABA_syn[17] = new GABA_syn(0.037808)")
h("a_baskets_12[8].soma a_pyramidals_48[2].synlist.append(new NetCon(&v(0.9586979), syn_SmallNet_bask_pyr_GABA_syn[17], 0.0, 0.0, 1.0))")
# Connection 18: 5, seg 0 (0.011565) -> 2, seg 6 (0.165123)
h("a_pyramidals_48[2].basal0 syn_SmallNet_bask_pyr_GABA_syn[18] = new GABA_syn(0.165123)")
h("a_baskets_12[5].soma a_pyramidals_48[2].synlist.append(new NetCon(&v(0.011565268), syn_SmallNet_bask_pyr_GABA_syn[18], 0.0, 0.0, 1.0))")
# Connection 19: 2, seg 0 (0.260580) -> 2, seg 6 (0.706609)
h("a_pyramidals_48[2].basal0 syn_SmallNet_bask_pyr_GABA_syn[19] = new GABA_syn(0.706609)")
h("a_baskets_12[2].soma a_pyramidals_48[2].synlist.append(new NetCon(&v(0.2605797), syn_SmallNet_bask_pyr_GABA_syn[19], 0.0, 0.0, 1.0))")
# Connection 20: 8, seg 0 (0.311837) -> 2, seg 6 (0.198597)
h("a_pyramidals_48[2].basal0 syn_SmallNet_bask_pyr_GABA_syn[20] = new GABA_syn(0.198597)")
h("a_baskets_12[8].soma a_pyramidals_48[2].synlist.append(new NetCon(&v(0.31183654), syn_SmallNet_bask_pyr_GABA_syn[20], 0.0, 0.0, 1.0))")
# Connection 21: 11, seg 0 (0.877830) -> 3, seg 6 (0.406193)
h("a_pyramidals_48[3].basal0 syn_SmallNet_bask_pyr_GABA_syn[21] = new GABA_syn(0.406193)")
h("a_baskets_12[11].soma a_pyramidals_48[3].synlist.append(new NetCon(&v(0.87782955), syn_SmallNet_bask_pyr_GABA_syn[21], 0.0, 0.0, 1.0))")
# Connection 22: 7, seg 0 (0.294253) -> 3, seg 6 (0.564966)
h("a_pyramidals_48[3].basal0 syn_SmallNet_bask_pyr_GABA_syn[22] = new GABA_syn(0.564966)")
h("a_baskets_12[7].soma a_pyramidals_48[3].synlist.append(new NetCon(&v(0.29425263), syn_SmallNet_bask_pyr_GABA_syn[22], 0.0, 0.0, 1.0))")
# Connection 23: 2, seg 0 (0.562029) -> 3, seg 6 (0.311786)
h("a_pyramidals_48[3].basal0 syn_SmallNet_bask_pyr_GABA_syn[23] = new GABA_syn(0.311786)")
h("a_baskets_12[2].soma a_pyramidals_48[3].synlist.append(new NetCon(&v(0.5620294), syn_SmallNet_bask_pyr_GABA_syn[23], 0.0, 0.0, 1.0))")
# Connection 24: 2, seg 0 (0.061397) -> 3, seg 6 (0.579333)
h("a_pyramidals_48[3].basal0 syn_SmallNet_bask_pyr_GABA_syn[24] = new GABA_syn(0.579333)")
h("a_baskets_12[2].soma a_pyramidals_48[3].synlist.append(new NetCon(&v(0.061397374), syn_SmallNet_bask_pyr_GABA_syn[24], 0.0, 0.0, 1.0))")
# Connection 25: 9, seg 0 (0.895003) -> 3, seg 6 (0.478321)
h("a_pyramidals_48[3].basal0 syn_SmallNet_bask_pyr_GABA_syn[25] = new GABA_syn(0.478321)")
h("a_baskets_12[9].soma a_pyramidals_48[3].synlist.append(new NetCon(&v(0.8950035), syn_SmallNet_bask_pyr_GABA_syn[25], 0.0, 0.0, 1.0))")
# Connection 26: 8, seg 0 (0.287951) -> 3, seg 6 (0.931502)
h("a_pyramidals_48[3].basal0 syn_SmallNet_bask_pyr_GABA_syn[26] = new GABA_syn(0.931502)")
h("a_baskets_12[8].soma a_pyramidals_48[3].synlist.append(new NetCon(&v(0.28795117), syn_SmallNet_bask_pyr_GABA_syn[26], 0.0, 0.0, 1.0))")
# Connection 27: 0, seg 0 (0.106948) -> 3, seg 6 (0.825754)
h("a_pyramidals_48[3].basal0 syn_SmallNet_bask_pyr_GABA_syn[27] = new GABA_syn(0.825754)")
h("a_baskets_12[0].soma a_pyramidals_48[3].synlist.append(new NetCon(&v(0.106948495), syn_SmallNet_bask_pyr_GABA_syn[27], 0.0, 0.0, 1.0))")
# Connection 28: 8, seg 0 (0.452645) -> 4, seg 6 (0.192883)
h("a_pyramidals_48[4].basal0 syn_SmallNet_bask_pyr_GABA_syn[28] = new GABA_syn(0.192883)")
h("a_baskets_12[8].soma a_pyramidals_48[4].synlist.append(new NetCon(&v(0.4526453), syn_SmallNet_bask_pyr_GABA_syn[28], 0.0, 0.0, 1.0))")
# Connection 29: 6, seg 0 (0.914046) -> 4, seg 6 (0.837636)
h("a_pyramidals_48[4].basal0 syn_SmallNet_bask_pyr_GABA_syn[29] = new GABA_syn(0.837636)")
h("a_baskets_12[6].soma a_pyramidals_48[4].synlist.append(new NetCon(&v(0.91404647), syn_SmallNet_bask_pyr_GABA_syn[29], 0.0, 0.0, 1.0))")
# Connection 30: 0, seg 0 (0.805813) -> 4, seg 6 (0.347368)
h("a_pyramidals_48[4].basal0 syn_SmallNet_bask_pyr_GABA_syn[30] = new GABA_syn(0.347368)")
h("a_baskets_12[0].soma a_pyramidals_48[4].synlist.append(new NetCon(&v(0.8058127), syn_SmallNet_bask_pyr_GABA_syn[30], 0.0, 0.0, 1.0))")
# Connection 31: 10, seg 0 (0.774189) -> 4, seg 6 (0.420474)
h("a_pyramidals_48[4].basal0 syn_SmallNet_bask_pyr_GABA_syn[31] = new GABA_syn(0.420474)")
h("a_baskets_12[10].soma a_pyramidals_48[4].synlist.append(new NetCon(&v(0.774189), syn_SmallNet_bask_pyr_GABA_syn[31], 0.0, 0.0, 1.0))")
# Connection 32: 0, seg 0 (0.625777) -> 4, seg 6 (0.879418)
h("a_pyramidals_48[4].basal0 syn_SmallNet_bask_pyr_GABA_syn[32] = new GABA_syn(0.879418)")
h("a_baskets_12[0].soma a_pyramidals_48[4].synlist.append(new NetCon(&v(0.6257765), syn_SmallNet_bask_pyr_GABA_syn[32], 0.0, 0.0, 1.0))")
# Connection 33: 7, seg 0 (0.519043) -> 4, seg 6 (0.953908)
h("a_pyramidals_48[4].basal0 syn_SmallNet_bask_pyr_GABA_syn[33] = new GABA_syn(0.953908)")
h("a_baskets_12[7].soma a_pyramidals_48[4].synlist.append(new NetCon(&v(0.519043), syn_SmallNet_bask_pyr_GABA_syn[33], 0.0, 0.0, 1.0))")
# Connection 34: 2, seg 0 (0.794481) -> 4, seg 6 (0.766661)
h("a_pyramidals_48[4].basal0 syn_SmallNet_bask_pyr_GABA_syn[34] = new GABA_syn(0.766661)")
h("a_baskets_12[2].soma a_pyramidals_48[4].synlist.append(new NetCon(&v(0.79448146), syn_SmallNet_bask_pyr_GABA_syn[34], 0.0, 0.0, 1.0))")
# Connection 35: 7, seg 0 (0.937915) -> 5, seg 6 (0.049911)
h("a_pyramidals_48[5].basal0 syn_SmallNet_bask_pyr_GABA_syn[35] = new GABA_syn(0.049911)")
h("a_baskets_12[7].soma a_pyramidals_48[5].synlist.append(new NetCon(&v(0.93791527), syn_SmallNet_bask_pyr_GABA_syn[35], 0.0, 0.0, 1.0))")
# Connection 36: 0, seg 0 (0.900105) -> 5, seg 6 (0.346742)
h("a_pyramidals_48[5].basal0 syn_SmallNet_bask_pyr_GABA_syn[36] = new GABA_syn(0.346742)")
h("a_baskets_12[0].soma a_pyramidals_48[5].synlist.append(new NetCon(&v(0.9001053), syn_SmallNet_bask_pyr_GABA_syn[36], 0.0, 0.0, 1.0))")
# Connection 37: 4, seg 0 (0.639618) -> 5, seg 6 (0.223846)
h("a_pyramidals_48[5].basal0 syn_SmallNet_bask_pyr_GABA_syn[37] = new GABA_syn(0.223846)")
h("a_baskets_12[4].soma a_pyramidals_48[5].synlist.append(new NetCon(&v(0.6396184), syn_SmallNet_bask_pyr_GABA_syn[37], 0.0, 0.0, 1.0))")
# Connection 38: 1, seg 0 (0.273156) -> 5, seg 6 (0.737779)
h("a_pyramidals_48[5].basal0 syn_SmallNet_bask_pyr_GABA_syn[38] = new GABA_syn(0.737779)")
h("a_baskets_12[1].soma a_pyramidals_48[5].synlist.append(new NetCon(&v(0.27315557), syn_SmallNet_bask_pyr_GABA_syn[38], 0.0, 0.0, 1.0))")
# Connection 39: 5, seg 0 (0.076576) -> 5, seg 6 (0.738230)
h("a_pyramidals_48[5].basal0 syn_SmallNet_bask_pyr_GABA_syn[39] = new GABA_syn(0.738230)")
h("a_baskets_12[5].soma a_pyramidals_48[5].synlist.append(new NetCon(&v(0.07657552), syn_SmallNet_bask_pyr_GABA_syn[39], 0.0, 0.0, 1.0))")
# Connection 40: 6, seg 0 (0.453028) -> 5, seg 6 (0.870223)
h("a_pyramidals_48[5].basal0 syn_SmallNet_bask_pyr_GABA_syn[40] = new GABA_syn(0.870223)")
h("a_baskets_12[6].soma a_pyramidals_48[5].synlist.append(new NetCon(&v(0.45302814), syn_SmallNet_bask_pyr_GABA_syn[40], 0.0, 0.0, 1.0))")
# Connection 41: 11, seg 0 (0.828824) -> 5, seg 6 (0.582970)
h("a_pyramidals_48[5].basal0 syn_SmallNet_bask_pyr_GABA_syn[41] = new GABA_syn(0.582970)")
h("a_baskets_12[11].soma a_pyramidals_48[5].synlist.append(new NetCon(&v(0.82882404), syn_SmallNet_bask_pyr_GABA_syn[41], 0.0, 0.0, 1.0))")
# Connection 42: 6, seg 0 (0.638304) -> 6, seg 6 (0.431118)
h("a_pyramidals_48[6].basal0 syn_SmallNet_bask_pyr_GABA_syn[42] = new GABA_syn(0.431118)")
h("a_baskets_12[6].soma a_pyramidals_48[6].synlist.append(new NetCon(&v(0.63830423), syn_SmallNet_bask_pyr_GABA_syn[42], 0.0, 0.0, 1.0))")
# Connection 43: 10, seg 0 (0.226497) -> 6, seg 6 (0.434760)
h("a_pyramidals_48[6].basal0 syn_SmallNet_bask_pyr_GABA_syn[43] = new GABA_syn(0.434760)")
h("a_baskets_12[10].soma a_pyramidals_48[6].synlist.append(new NetCon(&v(0.22649741), syn_SmallNet_bask_pyr_GABA_syn[43], 0.0, 0.0, 1.0))")
# Connection 44: 9, seg 0 (0.015232) -> 6, seg 6 (0.885395)
h("a_pyramidals_48[6].basal0 syn_SmallNet_bask_pyr_GABA_syn[44] = new GABA_syn(0.885395)")
h("a_baskets_12[9].soma a_pyramidals_48[6].synlist.append(new NetCon(&v(0.015231669), syn_SmallNet_bask_pyr_GABA_syn[44], 0.0, 0.0, 1.0))")
# Connection 45: 2, seg 0 (0.011251) -> 6, seg 6 (0.753587)
h("a_pyramidals_48[6].basal0 syn_SmallNet_bask_pyr_GABA_syn[45] = new GABA_syn(0.753587)")
h("a_baskets_12[2].soma a_pyramidals_48[6].synlist.append(new NetCon(&v(0.01125139), syn_SmallNet_bask_pyr_GABA_syn[45], 0.0, 0.0, 1.0))")
# Connection 46: 10, seg 0 (0.386319) -> 6, seg 6 (0.044178)
h("a_pyramidals_48[6].basal0 syn_SmallNet_bask_pyr_GABA_syn[46] = new GABA_syn(0.044178)")
h("a_baskets_12[10].soma a_pyramidals_48[6].synlist.append(new NetCon(&v(0.38631856), syn_SmallNet_bask_pyr_GABA_syn[46], 0.0, 0.0, 1.0))")
# Connection 47: 9, seg 0 (0.278193) -> 6, seg 6 (0.254789)
h("a_pyramidals_48[6].basal0 syn_SmallNet_bask_pyr_GABA_syn[47] = new GABA_syn(0.254789)")
h("a_baskets_12[9].soma a_pyramidals_48[6].synlist.append(new NetCon(&v(0.2781933), syn_SmallNet_bask_pyr_GABA_syn[47], 0.0, 0.0, 1.0))")
# Connection 48: 6, seg 0 (0.981342) -> 6, seg 6 (0.869155)
h("a_pyramidals_48[6].basal0 syn_SmallNet_bask_pyr_GABA_syn[48] = new GABA_syn(0.869155)")
h("a_baskets_12[6].soma a_pyramidals_48[6].synlist.append(new NetCon(&v(0.98134166), syn_SmallNet_bask_pyr_GABA_syn[48], 0.0, 0.0, 1.0))")
# Connection 49: 3, seg 0 (0.704527) -> 7, seg 6 (0.484690)
h("a_pyramidals_48[7].basal0 syn_SmallNet_bask_pyr_GABA_syn[49] = new GABA_syn(0.484690)")
h("a_baskets_12[3].soma a_pyramidals_48[7].synlist.append(new NetCon(&v(0.7045268), syn_SmallNet_bask_pyr_GABA_syn[49], 0.0, 0.0, 1.0))")
# Connection 50: 5, seg 0 (0.103770) -> 7, seg 6 (0.485770)
h("a_pyramidals_48[7].basal0 syn_SmallNet_bask_pyr_GABA_syn[50] = new GABA_syn(0.485770)")
h("a_baskets_12[5].soma a_pyramidals_48[7].synlist.append(new NetCon(&v(0.103770256), syn_SmallNet_bask_pyr_GABA_syn[50], 0.0, 0.0, 1.0))")
# Connection 51: 0, seg 0 (0.165083) -> 7, seg 6 (0.989909)
h("a_pyramidals_48[7].basal0 syn_SmallNet_bask_pyr_GABA_syn[51] = new GABA_syn(0.989909)")
h("a_baskets_12[0].soma a_pyramidals_48[7].synlist.append(new NetCon(&v(0.16508323), syn_SmallNet_bask_pyr_GABA_syn[51], 0.0, 0.0, 1.0))")
# Connection 52: 0, seg 0 (0.479986) -> 7, seg 6 (0.484275)
h("a_pyramidals_48[7].basal0 syn_SmallNet_bask_pyr_GABA_syn[52] = new GABA_syn(0.484275)")
h("a_baskets_12[0].soma a_pyramidals_48[7].synlist.append(new NetCon(&v(0.4799863), syn_SmallNet_bask_pyr_GABA_syn[52], 0.0, 0.0, 1.0))")
# Connection 53: 1, seg 0 (0.863782) -> 7, seg 6 (0.976192)
h("a_pyramidals_48[7].basal0 syn_SmallNet_bask_pyr_GABA_syn[53] = new GABA_syn(0.976192)")
h("a_baskets_12[1].soma a_pyramidals_48[7].synlist.append(new NetCon(&v(0.8637824), syn_SmallNet_bask_pyr_GABA_syn[53], 0.0, 0.0, 1.0))")
# Connection 54: 6, seg 0 (0.475561) -> 7, seg 6 (0.510077)
h("a_pyramidals_48[7].basal0 syn_SmallNet_bask_pyr_GABA_syn[54] = new GABA_syn(0.510077)")
h("a_baskets_12[6].soma a_pyramidals_48[7].synlist.append(new NetCon(&v(0.47556138), syn_SmallNet_bask_pyr_GABA_syn[54], 0.0, 0.0, 1.0))")
# Connection 55: 10, seg 0 (0.414561) -> 7, seg 6 (0.892123)
h("a_pyramidals_48[7].basal0 syn_SmallNet_bask_pyr_GABA_syn[55] = new GABA_syn(0.892123)")
h("a_baskets_12[10].soma a_pyramidals_48[7].synlist.append(new NetCon(&v(0.4145611), syn_SmallNet_bask_pyr_GABA_syn[55], 0.0, 0.0, 1.0))")
# Connection 56: 8, seg 0 (0.244555) -> 8, seg 6 (0.836512)
h("a_pyramidals_48[8].basal0 syn_SmallNet_bask_pyr_GABA_syn[56] = new GABA_syn(0.836512)")
h("a_baskets_12[8].soma a_pyramidals_48[8].synlist.append(new NetCon(&v(0.2445553), syn_SmallNet_bask_pyr_GABA_syn[56], 0.0, 0.0, 1.0))")
# Connection 57: 11, seg 0 (0.440001) -> 8, seg 6 (0.000572)
h("a_pyramidals_48[8].basal0 syn_SmallNet_bask_pyr_GABA_syn[57] = new GABA_syn(0.000572)")
h("a_baskets_12[11].soma a_pyramidals_48[8].synlist.append(new NetCon(&v(0.44000125), syn_SmallNet_bask_pyr_GABA_syn[57], 0.0, 0.0, 1.0))")
# Connection 58: 11, seg 0 (0.769169) -> 8, seg 6 (0.044454)
h("a_pyramidals_48[8].basal0 syn_SmallNet_bask_pyr_GABA_syn[58] = new GABA_syn(0.044454)")
h("a_baskets_12[11].soma a_pyramidals_48[8].synlist.append(new NetCon(&v(0.76916885), syn_SmallNet_bask_pyr_GABA_syn[58], 0.0, 0.0, 1.0))")
# Connection 59: 7, seg 0 (0.762518) -> 8, seg 6 (0.342898)
h("a_pyramidals_48[8].basal0 syn_SmallNet_bask_pyr_GABA_syn[59] = new GABA_syn(0.342898)")
h("a_baskets_12[7].soma a_pyramidals_48[8].synlist.append(new NetCon(&v(0.76251787), syn_SmallNet_bask_pyr_GABA_syn[59], 0.0, 0.0, 1.0))")
# Connection 60: 8, seg 0 (0.647057) -> 8, seg 6 (0.814242)
h("a_pyramidals_48[8].basal0 syn_SmallNet_bask_pyr_GABA_syn[60] = new GABA_syn(0.814242)")
h("a_baskets_12[8].soma a_pyramidals_48[8].synlist.append(new NetCon(&v(0.64705706), syn_SmallNet_bask_pyr_GABA_syn[60], 0.0, 0.0, 1.0))")
# Connection 61: 10, seg 0 (0.382620) -> 8, seg 6 (0.025041)
h("a_pyramidals_48[8].basal0 syn_SmallNet_bask_pyr_GABA_syn[61] = new GABA_syn(0.025041)")
h("a_baskets_12[10].soma a_pyramidals_48[8].synlist.append(new NetCon(&v(0.38261962), syn_SmallNet_bask_pyr_GABA_syn[61], 0.0, 0.0, 1.0))")
# Connection 62: 4, seg 0 (0.269395) -> 8, seg 6 (0.680824)
h("a_pyramidals_48[8].basal0 syn_SmallNet_bask_pyr_GABA_syn[62] = new GABA_syn(0.680824)")
h("a_baskets_12[4].soma a_pyramidals_48[8].synlist.append(new NetCon(&v(0.26939458), syn_SmallNet_bask_pyr_GABA_syn[62], 0.0, 0.0, 1.0))")
# Connection 63: 5, seg 0 (0.251359) -> 9, seg 6 (0.214427)
h("a_pyramidals_48[9].basal0 syn_SmallNet_bask_pyr_GABA_syn[63] = new GABA_syn(0.214427)")
h("a_baskets_12[5].soma a_pyramidals_48[9].synlist.append(new NetCon(&v(0.25135857), syn_SmallNet_bask_pyr_GABA_syn[63], 0.0, 0.0, 1.0))")
# Connection 64: 7, seg 0 (0.495579) -> 9, seg 6 (0.406760)
h("a_pyramidals_48[9].basal0 syn_SmallNet_bask_pyr_GABA_syn[64] = new GABA_syn(0.406760)")
h("a_baskets_12[7].soma a_pyramidals_48[9].synlist.append(new NetCon(&v(0.495579), syn_SmallNet_bask_pyr_GABA_syn[64], 0.0, 0.0, 1.0))")
# Connection 65: 3, seg 0 (0.417129) -> 9, seg 6 (0.179108)
h("a_pyramidals_48[9].basal0 syn_SmallNet_bask_pyr_GABA_syn[65] = new GABA_syn(0.179108)")
h("a_baskets_12[3].soma a_pyramidals_48[9].synlist.append(new NetCon(&v(0.41712946), syn_SmallNet_bask_pyr_GABA_syn[65], 0.0, 0.0, 1.0))")
# Connection 66: 3, seg 0 (0.010382) -> 9, seg 6 (0.262227)
h("a_pyramidals_48[9].basal0 syn_SmallNet_bask_pyr_GABA_syn[66] = new GABA_syn(0.262227)")
h("a_baskets_12[3].soma a_pyramidals_48[9].synlist.append(new NetCon(&v(0.010382116), syn_SmallNet_bask_pyr_GABA_syn[66], 0.0, 0.0, 1.0))")
# Connection 67: 7, seg 0 (0.599335) -> 9, seg 6 (0.539860)
h("a_pyramidals_48[9].basal0 syn_SmallNet_bask_pyr_GABA_syn[67] = new GABA_syn(0.539860)")
h("a_baskets_12[7].soma a_pyramidals_48[9].synlist.append(new NetCon(&v(0.5993351), syn_SmallNet_bask_pyr_GABA_syn[67], 0.0, 0.0, 1.0))")
# Connection 68: 7, seg 0 (0.315976) -> 9, seg 6 (0.048173)
h("a_pyramidals_48[9].basal0 syn_SmallNet_bask_pyr_GABA_syn[68] = new GABA_syn(0.048173)")
h("a_baskets_12[7].soma a_pyramidals_48[9].synlist.append(new NetCon(&v(0.31597584), syn_SmallNet_bask_pyr_GABA_syn[68], 0.0, 0.0, 1.0))")
# Connection 69: 6, seg 0 (0.361733) -> 9, seg 6 (0.034874)
h("a_pyramidals_48[9].basal0 syn_SmallNet_bask_pyr_GABA_syn[69] = new GABA_syn(0.034874)")
h("a_baskets_12[6].soma a_pyramidals_48[9].synlist.append(new NetCon(&v(0.36173314), syn_SmallNet_bask_pyr_GABA_syn[69], 0.0, 0.0, 1.0))")
# Connection 70: 6, seg 0 (0.724876) -> 10, seg 6 (0.260970)
h("a_pyramidals_48[10].basal0 syn_SmallNet_bask_pyr_GABA_syn[70] = new GABA_syn(0.260970)")
h("a_baskets_12[6].soma a_pyramidals_48[10].synlist.append(new NetCon(&v(0.72487587), syn_SmallNet_bask_pyr_GABA_syn[70], 0.0, 0.0, 1.0))")
# Connection 71: 8, seg 0 (0.541176) -> 10, seg 6 (0.728528)
h("a_pyramidals_48[10].basal0 syn_SmallNet_bask_pyr_GABA_syn[71] = new GABA_syn(0.728528)")
h("a_baskets_12[8].soma a_pyramidals_48[10].synlist.append(new NetCon(&v(0.5411758), syn_SmallNet_bask_pyr_GABA_syn[71], 0.0, 0.0, 1.0))")
# Connection 72: 11, seg 0 (0.927467) -> 10, seg 6 (0.937447)
h("a_pyramidals_48[10].basal0 syn_SmallNet_bask_pyr_GABA_syn[72] = new GABA_syn(0.937447)")
h("a_baskets_12[11].soma a_pyramidals_48[10].synlist.append(new NetCon(&v(0.9274669), syn_SmallNet_bask_pyr_GABA_syn[72], 0.0, 0.0, 1.0))")
# Connection 73: 3, seg 0 (0.744800) -> 10, seg 6 (0.496552)
h("a_pyramidals_48[10].basal0 syn_SmallNet_bask_pyr_GABA_syn[73] = new GABA_syn(0.496552)")
h("a_baskets_12[3].soma a_pyramidals_48[10].synlist.append(new NetCon(&v(0.7448001), syn_SmallNet_bask_pyr_GABA_syn[73], 0.0, 0.0, 1.0))")
# Connection 74: 10, seg 0 (0.596383) -> 10, seg 6 (0.514630)
h("a_pyramidals_48[10].basal0 syn_SmallNet_bask_pyr_GABA_syn[74] = new GABA_syn(0.514630)")
h("a_baskets_12[10].soma a_pyramidals_48[10].synlist.append(new NetCon(&v(0.59638256), syn_SmallNet_bask_pyr_GABA_syn[74], 0.0, 0.0, 1.0))")
# Connection 75: 1, seg 0 (0.176409) -> 10, seg 6 (0.609381)
h("a_pyramidals_48[10].basal0 syn_SmallNet_bask_pyr_GABA_syn[75] = new GABA_syn(0.609381)")
h("a_baskets_12[1].soma a_pyramidals_48[10].synlist.append(new NetCon(&v(0.17640907), syn_SmallNet_bask_pyr_GABA_syn[75], 0.0, 0.0, 1.0))")
# Connection 76: 1, seg 0 (0.438597) -> 10, seg 6 (0.829391)
h("a_pyramidals_48[10].basal0 syn_SmallNet_bask_pyr_GABA_syn[76] = new GABA_syn(0.829391)")
h("a_baskets_12[1].soma a_pyramidals_48[10].synlist.append(new NetCon(&v(0.43859696), syn_SmallNet_bask_pyr_GABA_syn[76], 0.0, 0.0, 1.0))")
# Connection 77: 7, seg 0 (0.887790) -> 11, seg 6 (0.304551)
h("a_pyramidals_48[11].basal0 syn_SmallNet_bask_pyr_GABA_syn[77] = new GABA_syn(0.304551)")
h("a_baskets_12[7].soma a_pyramidals_48[11].synlist.append(new NetCon(&v(0.8877901), syn_SmallNet_bask_pyr_GABA_syn[77], 0.0, 0.0, 1.0))")
# Connection 78: 5, seg 0 (0.346523) -> 11, seg 6 (0.646991)
h("a_pyramidals_48[11].basal0 syn_SmallNet_bask_pyr_GABA_syn[78] = new GABA_syn(0.646991)")
h("a_baskets_12[5].soma a_pyramidals_48[11].synlist.append(new NetCon(&v(0.34652328), syn_SmallNet_bask_pyr_GABA_syn[78], 0.0, 0.0, 1.0))")
# Connection 79: 11, seg 0 (0.236703) -> 11, seg 6 (0.350992)
h("a_pyramidals_48[11].basal0 syn_SmallNet_bask_pyr_GABA_syn[79] = new GABA_syn(0.350992)")
h("a_baskets_12[11].soma a_pyramidals_48[11].synlist.append(new NetCon(&v(0.23670274), syn_SmallNet_bask_pyr_GABA_syn[79], 0.0, 0.0, 1.0))")
# Connection 80: 3, seg 0 (0.015180) -> 11, seg 6 (0.473185)
h("a_pyramidals_48[11].basal0 syn_SmallNet_bask_pyr_GABA_syn[80] = new GABA_syn(0.473185)")
h("a_baskets_12[3].soma a_pyramidals_48[11].synlist.append(new NetCon(&v(0.015180349), syn_SmallNet_bask_pyr_GABA_syn[80], 0.0, 0.0, 1.0))")
# Connection 81: 0, seg 0 (0.626624) -> 11, seg 6 (0.717471)
h("a_pyramidals_48[11].basal0 syn_SmallNet_bask_pyr_GABA_syn[81] = new GABA_syn(0.717471)")
h("a_baskets_12[0].soma a_pyramidals_48[11].synlist.append(new NetCon(&v(0.62662435), syn_SmallNet_bask_pyr_GABA_syn[81], 0.0, 0.0, 1.0))")
# Connection 82: 3, seg 0 (0.652789) -> 11, seg 6 (0.673451)
h("a_pyramidals_48[11].basal0 syn_SmallNet_bask_pyr_GABA_syn[82] = new GABA_syn(0.673451)")
h("a_baskets_12[3].soma a_pyramidals_48[11].synlist.append(new NetCon(&v(0.65278906), syn_SmallNet_bask_pyr_GABA_syn[82], 0.0, 0.0, 1.0))")
# Connection 83: 0, seg 0 (0.195617) -> 11, seg 6 (0.001483)
h("a_pyramidals_48[11].basal0 syn_SmallNet_bask_pyr_GABA_syn[83] = new GABA_syn(0.001483)")
h("a_baskets_12[0].soma a_pyramidals_48[11].synlist.append(new NetCon(&v(0.19561714), syn_SmallNet_bask_pyr_GABA_syn[83], 0.0, 0.0, 1.0))")
# Connection 84: 9, seg 0 (0.257934) -> 12, seg 6 (0.051011)
h("a_pyramidals_48[12].basal0 syn_SmallNet_bask_pyr_GABA_syn[84] = new GABA_syn(0.051011)")
h("a_baskets_12[9].soma a_pyramidals_48[12].synlist.append(new NetCon(&v(0.25793403), syn_SmallNet_bask_pyr_GABA_syn[84], 0.0, 0.0, 1.0))")
# Connection 85: 5, seg 0 (0.267787) -> 12, seg 6 (0.359384)
h("a_pyramidals_48[12].basal0 syn_SmallNet_bask_pyr_GABA_syn[85] = new GABA_syn(0.359384)")
h("a_baskets_12[5].soma a_pyramidals_48[12].synlist.append(new NetCon(&v(0.2677868), syn_SmallNet_bask_pyr_GABA_syn[85], 0.0, 0.0, 1.0))")
# Connection 86: 9, seg 0 (0.403362) -> 12, seg 6 (0.379242)
h("a_pyramidals_48[12].basal0 syn_SmallNet_bask_pyr_GABA_syn[86] = new GABA_syn(0.379242)")
h("a_baskets_12[9].soma a_pyramidals_48[12].synlist.append(new NetCon(&v(0.40336198), syn_SmallNet_bask_pyr_GABA_syn[86], 0.0, 0.0, 1.0))")
# Connection 87: 11, seg 0 (0.626395) -> 12, seg 6 (0.400424)
h("a_pyramidals_48[12].basal0 syn_SmallNet_bask_pyr_GABA_syn[87] = new GABA_syn(0.400424)")
h("a_baskets_12[11].soma a_pyramidals_48[12].synlist.append(new NetCon(&v(0.6263949), syn_SmallNet_bask_pyr_GABA_syn[87], 0.0, 0.0, 1.0))")
# Connection 88: 3, seg 0 (0.048933) -> 12, seg 6 (0.105130)
h("a_pyramidals_48[12].basal0 syn_SmallNet_bask_pyr_GABA_syn[88] = new GABA_syn(0.105130)")
h("a_baskets_12[3].soma a_pyramidals_48[12].synlist.append(new NetCon(&v(0.048933268), syn_SmallNet_bask_pyr_GABA_syn[88], 0.0, 0.0, 1.0))")
# Connection 89: 4, seg 0 (0.882656) -> 12, seg 6 (0.323138)
h("a_pyramidals_48[12].basal0 syn_SmallNet_bask_pyr_GABA_syn[89] = new GABA_syn(0.323138)")
h("a_baskets_12[4].soma a_pyramidals_48[12].synlist.append(new NetCon(&v(0.8826562), syn_SmallNet_bask_pyr_GABA_syn[89], 0.0, 0.0, 1.0))")
# Connection 90: 7, seg 0 (0.934734) -> 12, seg 6 (0.803401)
h("a_pyramidals_48[12].basal0 syn_SmallNet_bask_pyr_GABA_syn[90] = new GABA_syn(0.803401)")
h("a_baskets_12[7].soma a_pyramidals_48[12].synlist.append(new NetCon(&v(0.93473446), syn_SmallNet_bask_pyr_GABA_syn[90], 0.0, 0.0, 1.0))")
# Connection 91: 8, seg 0 (0.483177) -> 13, seg 6 (0.342724)
h("a_pyramidals_48[13].basal0 syn_SmallNet_bask_pyr_GABA_syn[91] = new GABA_syn(0.342724)")
h("a_baskets_12[8].soma a_pyramidals_48[13].synlist.append(new NetCon(&v(0.4831773), syn_SmallNet_bask_pyr_GABA_syn[91], 0.0, 0.0, 1.0))")
# Connection 92: 10, seg 0 (0.729079) -> 13, seg 6 (0.362696)
h("a_pyramidals_48[13].basal0 syn_SmallNet_bask_pyr_GABA_syn[92] = new GABA_syn(0.362696)")
h("a_baskets_12[10].soma a_pyramidals_48[13].synlist.append(new NetCon(&v(0.7290791), syn_SmallNet_bask_pyr_GABA_syn[92], 0.0, 0.0, 1.0))")
# Connection 93: 10, seg 0 (0.413038) -> 13, seg 6 (0.763491)
h("a_pyramidals_48[13].basal0 syn_SmallNet_bask_pyr_GABA_syn[93] = new GABA_syn(0.763491)")
h("a_baskets_12[10].soma a_pyramidals_48[13].synlist.append(new NetCon(&v(0.41303766), syn_SmallNet_bask_pyr_GABA_syn[93], 0.0, 0.0, 1.0))")
# Connection 94: 4, seg 0 (0.040534) -> 13, seg 6 (0.785311)
h("a_pyramidals_48[13].basal0 syn_SmallNet_bask_pyr_GABA_syn[94] = new GABA_syn(0.785311)")
h("a_baskets_12[4].soma a_pyramidals_48[13].synlist.append(new NetCon(&v(0.04053414), syn_SmallNet_bask_pyr_GABA_syn[94], 0.0, 0.0, 1.0))")
# Connection 95: 2, seg 0 (0.590092) -> 13, seg 6 (0.599408)
h("a_pyramidals_48[13].basal0 syn_SmallNet_bask_pyr_GABA_syn[95] = new GABA_syn(0.599408)")
h("a_baskets_12[2].soma a_pyramidals_48[13].synlist.append(new NetCon(&v(0.5900917), syn_SmallNet_bask_pyr_GABA_syn[95], 0.0, 0.0, 1.0))")
# Connection 96: 6, seg 0 (0.321614) -> 13, seg 6 (0.008175)
h("a_pyramidals_48[13].basal0 syn_SmallNet_bask_pyr_GABA_syn[96] = new GABA_syn(0.008175)")
h("a_baskets_12[6].soma a_pyramidals_48[13].synlist.append(new NetCon(&v(0.32161438), syn_SmallNet_bask_pyr_GABA_syn[96], 0.0, 0.0, 1.0))")
# Connection 97: 6, seg 0 (0.430262) -> 13, seg 6 (0.053999)
h("a_pyramidals_48[13].basal0 syn_SmallNet_bask_pyr_GABA_syn[97] = new GABA_syn(0.053999)")
h("a_baskets_12[6].soma a_pyramidals_48[13].synlist.append(new NetCon(&v(0.4302619), syn_SmallNet_bask_pyr_GABA_syn[97], 0.0, 0.0, 1.0))")
# Connection 98: 4, seg 0 (0.109088) -> 14, seg 6 (0.991833)
h("a_pyramidals_48[14].basal0 syn_SmallNet_bask_pyr_GABA_syn[98] = new GABA_syn(0.991833)")
h("a_baskets_12[4].soma a_pyramidals_48[14].synlist.append(new NetCon(&v(0.10908818), syn_SmallNet_bask_pyr_GABA_syn[98], 0.0, 0.0, 1.0))")
# Connection 99: 7, seg 0 (0.607143) -> 14, seg 6 (0.121294)
h("a_pyramidals_48[14].basal0 syn_SmallNet_bask_pyr_GABA_syn[99] = new GABA_syn(0.121294)")
h("a_baskets_12[7].soma a_pyramidals_48[14].synlist.append(new NetCon(&v(0.6071434), syn_SmallNet_bask_pyr_GABA_syn[99], 0.0, 0.0, 1.0))")
# Connection 100: 2, seg 0 (0.759406) -> 14, seg 6 (0.607410)
h("a_pyramidals_48[14].basal0 syn_SmallNet_bask_pyr_GABA_syn[100] = new GABA_syn(0.607410)")
h("a_baskets_12[2].soma a_pyramidals_48[14].synlist.append(new NetCon(&v(0.75940555), syn_SmallNet_bask_pyr_GABA_syn[100], 0.0, 0.0, 1.0))")
# Connection 101: 4, seg 0 (0.506639) -> 14, seg 6 (0.322637)
h("a_pyramidals_48[14].basal0 syn_SmallNet_bask_pyr_GABA_syn[101] = new GABA_syn(0.322637)")
h("a_baskets_12[4].soma a_pyramidals_48[14].synlist.append(new NetCon(&v(0.50663924), syn_SmallNet_bask_pyr_GABA_syn[101], 0.0, 0.0, 1.0))")
# Connection 102: 10, seg 0 (0.462564) -> 14, seg 6 (0.084055)
h("a_pyramidals_48[14].basal0 syn_SmallNet_bask_pyr_GABA_syn[102] = new GABA_syn(0.084055)")
h("a_baskets_12[10].soma a_pyramidals_48[14].synlist.append(new NetCon(&v(0.46256435), syn_SmallNet_bask_pyr_GABA_syn[102], 0.0, 0.0, 1.0))")
# Connection 103: 9, seg 0 (0.872264) -> 14, seg 6 (0.040304)
h("a_pyramidals_48[14].basal0 syn_SmallNet_bask_pyr_GABA_syn[103] = new GABA_syn(0.040304)")
h("a_baskets_12[9].soma a_pyramidals_48[14].synlist.append(new NetCon(&v(0.87226444), syn_SmallNet_bask_pyr_GABA_syn[103], 0.0, 0.0, 1.0))")
# Connection 104: 4, seg 0 (0.168131) -> 14, seg 6 (0.458537)
h("a_pyramidals_48[14].basal0 syn_SmallNet_bask_pyr_GABA_syn[104] = new GABA_syn(0.458537)")
h("a_baskets_12[4].soma a_pyramidals_48[14].synlist.append(new NetCon(&v(0.16813141), syn_SmallNet_bask_pyr_GABA_syn[104], 0.0, 0.0, 1.0))")
# Connection 105: 9, seg 0 (0.627917) -> 15, seg 6 (0.824363)
h("a_pyramidals_48[15].basal0 syn_SmallNet_bask_pyr_GABA_syn[105] = new GABA_syn(0.824363)")
h("a_baskets_12[9].soma a_pyramidals_48[15].synlist.append(new NetCon(&v(0.62791675), syn_SmallNet_bask_pyr_GABA_syn[105], 0.0, 0.0, 1.0))")
# Connection 106: 4, seg 0 (0.598615) -> 15, seg 6 (0.880085)
h("a_pyramidals_48[15].basal0 syn_SmallNet_bask_pyr_GABA_syn[106] = new GABA_syn(0.880085)")
h("a_baskets_12[4].soma a_pyramidals_48[15].synlist.append(new NetCon(&v(0.59861475), syn_SmallNet_bask_pyr_GABA_syn[106], 0.0, 0.0, 1.0))")
# Connection 107: 3, seg 0 (0.550111) -> 15, seg 6 (0.932550)
h("a_pyramidals_48[15].basal0 syn_SmallNet_bask_pyr_GABA_syn[107] = new GABA_syn(0.932550)")
h("a_baskets_12[3].soma a_pyramidals_48[15].synlist.append(new NetCon(&v(0.5501108), syn_SmallNet_bask_pyr_GABA_syn[107], 0.0, 0.0, 1.0))")
# Connection 108: 7, seg 0 (0.837482) -> 15, seg 6 (0.395665)
h("a_pyramidals_48[15].basal0 syn_SmallNet_bask_pyr_GABA_syn[108] = new GABA_syn(0.395665)")
h("a_baskets_12[7].soma a_pyramidals_48[15].synlist.append(new NetCon(&v(0.8374816), syn_SmallNet_bask_pyr_GABA_syn[108], 0.0, 0.0, 1.0))")
# Connection 109: 10, seg 0 (0.614535) -> 15, seg 6 (0.201825)
h("a_pyramidals_48[15].basal0 syn_SmallNet_bask_pyr_GABA_syn[109] = new GABA_syn(0.201825)")
h("a_baskets_12[10].soma a_pyramidals_48[15].synlist.append(new NetCon(&v(0.614535), syn_SmallNet_bask_pyr_GABA_syn[109], 0.0, 0.0, 1.0))")
# Connection 110: 1, seg 0 (0.139789) -> 15, seg 6 (0.185367)
h("a_pyramidals_48[15].basal0 syn_SmallNet_bask_pyr_GABA_syn[110] = new GABA_syn(0.185367)")
h("a_baskets_12[1].soma a_pyramidals_48[15].synlist.append(new NetCon(&v(0.13978934), syn_SmallNet_bask_pyr_GABA_syn[110], 0.0, 0.0, 1.0))")
# Connection 111: 3, seg 0 (0.439513) -> 15, seg 6 (0.882551)
h("a_pyramidals_48[15].basal0 syn_SmallNet_bask_pyr_GABA_syn[111] = new GABA_syn(0.882551)")
h("a_baskets_12[3].soma a_pyramidals_48[15].synlist.append(new NetCon(&v(0.43951267), syn_SmallNet_bask_pyr_GABA_syn[111], 0.0, 0.0, 1.0))")
# Connection 112: 4, seg 0 (0.633624) -> 16, seg 6 (0.373716)
h("a_pyramidals_48[16].basal0 syn_SmallNet_bask_pyr_GABA_syn[112] = new GABA_syn(0.373716)")
h("a_baskets_12[4].soma a_pyramidals_48[16].synlist.append(new NetCon(&v(0.6336242), syn_SmallNet_bask_pyr_GABA_syn[112], 0.0, 0.0, 1.0))")
# Connection 113: 1, seg 0 (0.203090) -> 16, seg 6 (0.573130)
h("a_pyramidals_48[16].basal0 syn_SmallNet_bask_pyr_GABA_syn[113] = new GABA_syn(0.573130)")
h("a_baskets_12[1].soma a_pyramidals_48[16].synlist.append(new NetCon(&v(0.20309049), syn_SmallNet_bask_pyr_GABA_syn[113], 0.0, 0.0, 1.0))")
# Connection 114: 6, seg 0 (0.486940) -> 16, seg 6 (0.790210)
h("a_pyramidals_48[16].basal0 syn_SmallNet_bask_pyr_GABA_syn[114] = new GABA_syn(0.790210)")
h("a_baskets_12[6].soma a_pyramidals_48[16].synlist.append(new NetCon(&v(0.4869399), syn_SmallNet_bask_pyr_GABA_syn[114], 0.0, 0.0, 1.0))")
# Connection 115: 3, seg 0 (0.076508) -> 16, seg 6 (0.775817)
h("a_pyramidals_48[16].basal0 syn_SmallNet_bask_pyr_GABA_syn[115] = new GABA_syn(0.775817)")
h("a_baskets_12[3].soma a_pyramidals_48[16].synlist.append(new NetCon(&v(0.07650781), syn_SmallNet_bask_pyr_GABA_syn[115], 0.0, 0.0, 1.0))")
# Connection 116: 5, seg 0 (0.996099) -> 16, seg 6 (0.748391)
h("a_pyramidals_48[16].basal0 syn_SmallNet_bask_pyr_GABA_syn[116] = new GABA_syn(0.748391)")
h("a_baskets_12[5].soma a_pyramidals_48[16].synlist.append(new NetCon(&v(0.9960995), syn_SmallNet_bask_pyr_GABA_syn[116], 0.0, 0.0, 1.0))")
# Connection 117: 0, seg 0 (0.952064) -> 16, seg 6 (0.459441)
h("a_pyramidals_48[16].basal0 syn_SmallNet_bask_pyr_GABA_syn[117] = new GABA_syn(0.459441)")
h("a_baskets_12[0].soma a_pyramidals_48[16].synlist.append(new NetCon(&v(0.952064), syn_SmallNet_bask_pyr_GABA_syn[117], 0.0, 0.0, 1.0))")
# Connection 118: 9, seg 0 (0.357623) -> 16, seg 6 (0.355343)
h("a_pyramidals_48[16].basal0 syn_SmallNet_bask_pyr_GABA_syn[118] = new GABA_syn(0.355343)")
h("a_baskets_12[9].soma a_pyramidals_48[16].synlist.append(new NetCon(&v(0.35762298), syn_SmallNet_bask_pyr_GABA_syn[118], 0.0, 0.0, 1.0))")
# Connection 119: 4, seg 0 (0.626624) -> 17, seg 6 (0.706384)
h("a_pyramidals_48[17].basal0 syn_SmallNet_bask_pyr_GABA_syn[119] = new GABA_syn(0.706384)")
h("a_baskets_12[4].soma a_pyramidals_48[17].synlist.append(new NetCon(&v(0.6266238), syn_SmallNet_bask_pyr_GABA_syn[119], 0.0, 0.0, 1.0))")
# Connection 120: 1, seg 0 (0.970346) -> 17, seg 6 (0.374789)
h("a_pyramidals_48[17].basal0 syn_SmallNet_bask_pyr_GABA_syn[120] = new GABA_syn(0.374789)")
h("a_baskets_12[1].soma a_pyramidals_48[17].synlist.append(new NetCon(&v(0.9703456), syn_SmallNet_bask_pyr_GABA_syn[120], 0.0, 0.0, 1.0))")
# Connection 121: 2, seg 0 (0.021515) -> 17, seg 6 (0.513781)
h("a_pyramidals_48[17].basal0 syn_SmallNet_bask_pyr_GABA_syn[121] = new GABA_syn(0.513781)")
h("a_baskets_12[2].soma a_pyramidals_48[17].synlist.append(new NetCon(&v(0.021514714), syn_SmallNet_bask_pyr_GABA_syn[121], 0.0, 0.0, 1.0))")
# Connection 122: 10, seg 0 (0.023234) -> 17, seg 6 (0.134808)
h("a_pyramidals_48[17].basal0 syn_SmallNet_bask_pyr_GABA_syn[122] = new GABA_syn(0.134808)")
h("a_baskets_12[10].soma a_pyramidals_48[17].synlist.append(new NetCon(&v(0.023233652), syn_SmallNet_bask_pyr_GABA_syn[122], 0.0, 0.0, 1.0))")
# Connection 123: 10, seg 0 (0.680915) -> 17, seg 6 (0.597374)
h("a_pyramidals_48[17].basal0 syn_SmallNet_bask_pyr_GABA_syn[123] = new GABA_syn(0.597374)")
h("a_baskets_12[10].soma a_pyramidals_48[17].synlist.append(new NetCon(&v(0.68091536), syn_SmallNet_bask_pyr_GABA_syn[123], 0.0, 0.0, 1.0))")
# Connection 124: 0, seg 0 (0.774549) -> 17, seg 6 (0.181919)
h("a_pyramidals_48[17].basal0 syn_SmallNet_bask_pyr_GABA_syn[124] = new GABA_syn(0.181919)")
h("a_baskets_12[0].soma a_pyramidals_48[17].synlist.append(new NetCon(&v(0.77454925), syn_SmallNet_bask_pyr_GABA_syn[124], 0.0, 0.0, 1.0))")
# Connection 125: 0, seg 0 (0.788385) -> 17, seg 6 (0.571031)
h("a_pyramidals_48[17].basal0 syn_SmallNet_bask_pyr_GABA_syn[125] = new GABA_syn(0.571031)")
h("a_baskets_12[0].soma a_pyramidals_48[17].synlist.append(new NetCon(&v(0.7883853), syn_SmallNet_bask_pyr_GABA_syn[125], 0.0, 0.0, 1.0))")
# Connection 126: 3, seg 0 (0.390243) -> 18, seg 6 (0.819805)
h("a_pyramidals_48[18].basal0 syn_SmallNet_bask_pyr_GABA_syn[126] = new GABA_syn(0.819805)")
h("a_baskets_12[3].soma a_pyramidals_48[18].synlist.append(new NetCon(&v(0.39024252), syn_SmallNet_bask_pyr_GABA_syn[126], 0.0, 0.0, 1.0))")
# Connection 127: 11, seg 0 (0.881958) -> 18, seg 6 (0.948839)
h("a_pyramidals_48[18].basal0 syn_SmallNet_bask_pyr_GABA_syn[127] = new GABA_syn(0.948839)")
h("a_baskets_12[11].soma a_pyramidals_48[18].synlist.append(new NetCon(&v(0.8819576), syn_SmallNet_bask_pyr_GABA_syn[127], 0.0, 0.0, 1.0))")
# Connection 128: 2, seg 0 (0.429610) -> 18, seg 6 (0.047767)
h("a_pyramidals_48[18].basal0 syn_SmallNet_bask_pyr_GABA_syn[128] = new GABA_syn(0.047767)")
h("a_baskets_12[2].soma a_pyramidals_48[18].synlist.append(new NetCon(&v(0.42961025), syn_SmallNet_bask_pyr_GABA_syn[128], 0.0, 0.0, 1.0))")
# Connection 129: 9, seg 0 (0.210009) -> 18, seg 6 (0.740018)
h("a_pyramidals_48[18].basal0 syn_SmallNet_bask_pyr_GABA_syn[129] = new GABA_syn(0.740018)")
h("a_baskets_12[9].soma a_pyramidals_48[18].synlist.append(new NetCon(&v(0.2100085), syn_SmallNet_bask_pyr_GABA_syn[129], 0.0, 0.0, 1.0))")
# Connection 130: 4, seg 0 (0.399309) -> 18, seg 6 (0.060361)
h("a_pyramidals_48[18].basal0 syn_SmallNet_bask_pyr_GABA_syn[130] = new GABA_syn(0.060361)")
h("a_baskets_12[4].soma a_pyramidals_48[18].synlist.append(new NetCon(&v(0.39930946), syn_SmallNet_bask_pyr_GABA_syn[130], 0.0, 0.0, 1.0))")
# Connection 131: 8, seg 0 (0.219680) -> 18, seg 6 (0.822373)
h("a_pyramidals_48[18].basal0 syn_SmallNet_bask_pyr_GABA_syn[131] = new GABA_syn(0.822373)")
h("a_baskets_12[8].soma a_pyramidals_48[18].synlist.append(new NetCon(&v(0.21968007), syn_SmallNet_bask_pyr_GABA_syn[131], 0.0, 0.0, 1.0))")
# Connection 132: 1, seg 0 (0.908747) -> 18, seg 6 (0.769725)
h("a_pyramidals_48[18].basal0 syn_SmallNet_bask_pyr_GABA_syn[132] = new GABA_syn(0.769725)")
h("a_baskets_12[1].soma a_pyramidals_48[18].synlist.append(new NetCon(&v(0.90874654), syn_SmallNet_bask_pyr_GABA_syn[132], 0.0, 0.0, 1.0))")
# Connection 133: 2, seg 0 (0.460427) -> 19, seg 6 (0.680859)
h("a_pyramidals_48[19].basal0 syn_SmallNet_bask_pyr_GABA_syn[133] = new GABA_syn(0.680859)")
h("a_baskets_12[2].soma a_pyramidals_48[19].synlist.append(new NetCon(&v(0.46042717), syn_SmallNet_bask_pyr_GABA_syn[133], 0.0, 0.0, 1.0))")
# Connection 134: 1, seg 0 (0.135945) -> 19, seg 6 (0.251221)
h("a_pyramidals_48[19].basal0 syn_SmallNet_bask_pyr_GABA_syn[134] = new GABA_syn(0.251221)")
h("a_baskets_12[1].soma a_pyramidals_48[19].synlist.append(new NetCon(&v(0.1359452), syn_SmallNet_bask_pyr_GABA_syn[134], 0.0, 0.0, 1.0))")
# Connection 135: 9, seg 0 (0.498110) -> 19, seg 6 (0.692635)
h("a_pyramidals_48[19].basal0 syn_SmallNet_bask_pyr_GABA_syn[135] = new GABA_syn(0.692635)")
h("a_baskets_12[9].soma a_pyramidals_48[19].synlist.append(new NetCon(&v(0.49811018), syn_SmallNet_bask_pyr_GABA_syn[135], 0.0, 0.0, 1.0))")
# Connection 136: 1, seg 0 (0.925562) -> 19, seg 6 (0.976249)
h("a_pyramidals_48[19].basal0 syn_SmallNet_bask_pyr_GABA_syn[136] = new GABA_syn(0.976249)")
h("a_baskets_12[1].soma a_pyramidals_48[19].synlist.append(new NetCon(&v(0.92556244), syn_SmallNet_bask_pyr_GABA_syn[136], 0.0, 0.0, 1.0))")
# Connection 137: 5, seg 0 (0.118672) -> 19, seg 6 (0.230367)
h("a_pyramidals_48[19].basal0 syn_SmallNet_bask_pyr_GABA_syn[137] = new GABA_syn(0.230367)")
h("a_baskets_12[5].soma a_pyramidals_48[19].synlist.append(new NetCon(&v(0.11867213), syn_SmallNet_bask_pyr_GABA_syn[137], 0.0, 0.0, 1.0))")
# Connection 138: 5, seg 0 (0.699378) -> 19, seg 6 (0.005939)
h("a_pyramidals_48[19].basal0 syn_SmallNet_bask_pyr_GABA_syn[138] = new GABA_syn(0.005939)")
h("a_baskets_12[5].soma a_pyramidals_48[19].synlist.append(new NetCon(&v(0.69937766), syn_SmallNet_bask_pyr_GABA_syn[138], 0.0, 0.0, 1.0))")
# Connection 139: 5, seg 0 (0.512656) -> 19, seg 6 (0.010087)
h("a_pyramidals_48[19].basal0 syn_SmallNet_bask_pyr_GABA_syn[139] = new GABA_syn(0.010087)")
h("a_baskets_12[5].soma a_pyramidals_48[19].synlist.append(new NetCon(&v(0.51265585), syn_SmallNet_bask_pyr_GABA_syn[139], 0.0, 0.0, 1.0))")
# Connection 140: 5, seg 0 (0.885046) -> 20, seg 6 (0.595453)
h("a_pyramidals_48[20].basal0 syn_SmallNet_bask_pyr_GABA_syn[140] = new GABA_syn(0.595453)")
h("a_baskets_12[5].soma a_pyramidals_48[20].synlist.append(new NetCon(&v(0.8850461), syn_SmallNet_bask_pyr_GABA_syn[140], 0.0, 0.0, 1.0))")
# Connection 141: 6, seg 0 (0.676018) -> 20, seg 6 (0.578137)
h("a_pyramidals_48[20].basal0 syn_SmallNet_bask_pyr_GABA_syn[141] = new GABA_syn(0.578137)")
h("a_baskets_12[6].soma a_pyramidals_48[20].synlist.append(new NetCon(&v(0.67601794), syn_SmallNet_bask_pyr_GABA_syn[141], 0.0, 0.0, 1.0))")
# Connection 142: 3, seg 0 (0.200840) -> 20, seg 6 (0.924882)
h("a_pyramidals_48[20].basal0 syn_SmallNet_bask_pyr_GABA_syn[142] = new GABA_syn(0.924882)")
h("a_baskets_12[3].soma a_pyramidals_48[20].synlist.append(new NetCon(&v(0.20084047), syn_SmallNet_bask_pyr_GABA_syn[142], 0.0, 0.0, 1.0))")
# Connection 143: 0, seg 0 (0.757760) -> 20, seg 6 (0.267021)
h("a_pyramidals_48[20].basal0 syn_SmallNet_bask_pyr_GABA_syn[143] = new GABA_syn(0.267021)")
h("a_baskets_12[0].soma a_pyramidals_48[20].synlist.append(new NetCon(&v(0.75775963), syn_SmallNet_bask_pyr_GABA_syn[143], 0.0, 0.0, 1.0))")
# Connection 144: 3, seg 0 (0.101617) -> 20, seg 6 (0.231870)
h("a_pyramidals_48[20].basal0 syn_SmallNet_bask_pyr_GABA_syn[144] = new GABA_syn(0.231870)")
h("a_baskets_12[3].soma a_pyramidals_48[20].synlist.append(new NetCon(&v(0.101617396), syn_SmallNet_bask_pyr_GABA_syn[144], 0.0, 0.0, 1.0))")
# Connection 145: 11, seg 0 (0.300641) -> 20, seg 6 (0.975701)
h("a_pyramidals_48[20].basal0 syn_SmallNet_bask_pyr_GABA_syn[145] = new GABA_syn(0.975701)")
h("a_baskets_12[11].soma a_pyramidals_48[20].synlist.append(new NetCon(&v(0.30064094), syn_SmallNet_bask_pyr_GABA_syn[145], 0.0, 0.0, 1.0))")
# Connection 146: 3, seg 0 (0.243980) -> 20, seg 6 (0.059321)
h("a_pyramidals_48[20].basal0 syn_SmallNet_bask_pyr_GABA_syn[146] = new GABA_syn(0.059321)")
h("a_baskets_12[3].soma a_pyramidals_48[20].synlist.append(new NetCon(&v(0.24397999), syn_SmallNet_bask_pyr_GABA_syn[146], 0.0, 0.0, 1.0))")
# Connection 147: 1, seg 0 (0.355228) -> 21, seg 6 (0.002184)
h("a_pyramidals_48[21].basal0 syn_SmallNet_bask_pyr_GABA_syn[147] = new GABA_syn(0.002184)")
h("a_baskets_12[1].soma a_pyramidals_48[21].synlist.append(new NetCon(&v(0.35522795), syn_SmallNet_bask_pyr_GABA_syn[147], 0.0, 0.0, 1.0))")
# Connection 148: 3, seg 0 (0.244552) -> 21, seg 6 (0.610371)
h("a_pyramidals_48[21].basal0 syn_SmallNet_bask_pyr_GABA_syn[148] = new GABA_syn(0.610371)")
h("a_baskets_12[3].soma a_pyramidals_48[21].synlist.append(new NetCon(&v(0.24455237), syn_SmallNet_bask_pyr_GABA_syn[148], 0.0, 0.0, 1.0))")
# Connection 149: 9, seg 0 (0.212613) -> 21, seg 6 (0.747416)
h("a_pyramidals_48[21].basal0 syn_SmallNet_bask_pyr_GABA_syn[149] = new GABA_syn(0.747416)")
h("a_baskets_12[9].soma a_pyramidals_48[21].synlist.append(new NetCon(&v(0.21261328), syn_SmallNet_bask_pyr_GABA_syn[149], 0.0, 0.0, 1.0))")
# Connection 150: 6, seg 0 (0.160704) -> 21, seg 6 (0.139763)
h("a_pyramidals_48[21].basal0 syn_SmallNet_bask_pyr_GABA_syn[150] = new GABA_syn(0.139763)")
h("a_baskets_12[6].soma a_pyramidals_48[21].synlist.append(new NetCon(&v(0.16070378), syn_SmallNet_bask_pyr_GABA_syn[150], 0.0, 0.0, 1.0))")
# Connection 151: 1, seg 0 (0.437289) -> 21, seg 6 (0.651879)
h("a_pyramidals_48[21].basal0 syn_SmallNet_bask_pyr_GABA_syn[151] = new GABA_syn(0.651879)")
h("a_baskets_12[1].soma a_pyramidals_48[21].synlist.append(new NetCon(&v(0.43728888), syn_SmallNet_bask_pyr_GABA_syn[151], 0.0, 0.0, 1.0))")
# Connection 152: 0, seg 0 (0.128279) -> 21, seg 6 (0.342041)
h("a_pyramidals_48[21].basal0 syn_SmallNet_bask_pyr_GABA_syn[152] = new GABA_syn(0.342041)")
h("a_baskets_12[0].soma a_pyramidals_48[21].synlist.append(new NetCon(&v(0.12827933), syn_SmallNet_bask_pyr_GABA_syn[152], 0.0, 0.0, 1.0))")
# Connection 153: 11, seg 0 (0.098724) -> 21, seg 6 (0.228074)
h("a_pyramidals_48[21].basal0 syn_SmallNet_bask_pyr_GABA_syn[153] = new GABA_syn(0.228074)")
h("a_baskets_12[11].soma a_pyramidals_48[21].synlist.append(new NetCon(&v(0.09872383), syn_SmallNet_bask_pyr_GABA_syn[153], 0.0, 0.0, 1.0))")
# Connection 154: 11, seg 0 (0.964672) -> 22, seg 6 (0.661521)
h("a_pyramidals_48[22].basal0 syn_SmallNet_bask_pyr_GABA_syn[154] = new GABA_syn(0.661521)")
h("a_baskets_12[11].soma a_pyramidals_48[22].synlist.append(new NetCon(&v(0.9646719), syn_SmallNet_bask_pyr_GABA_syn[154], 0.0, 0.0, 1.0))")
# Connection 155: 10, seg 0 (0.502846) -> 22, seg 6 (0.012860)
h("a_pyramidals_48[22].basal0 syn_SmallNet_bask_pyr_GABA_syn[155] = new GABA_syn(0.012860)")
h("a_baskets_12[10].soma a_pyramidals_48[22].synlist.append(new NetCon(&v(0.5028461), syn_SmallNet_bask_pyr_GABA_syn[155], 0.0, 0.0, 1.0))")
# Connection 156: 0, seg 0 (0.619464) -> 22, seg 6 (0.225784)
h("a_pyramidals_48[22].basal0 syn_SmallNet_bask_pyr_GABA_syn[156] = new GABA_syn(0.225784)")
h("a_baskets_12[0].soma a_pyramidals_48[22].synlist.append(new NetCon(&v(0.61946416), syn_SmallNet_bask_pyr_GABA_syn[156], 0.0, 0.0, 1.0))")
# Connection 157: 0, seg 0 (0.263543) -> 22, seg 6 (0.288845)
h("a_pyramidals_48[22].basal0 syn_SmallNet_bask_pyr_GABA_syn[157] = new GABA_syn(0.288845)")
h("a_baskets_12[0].soma a_pyramidals_48[22].synlist.append(new NetCon(&v(0.26354253), syn_SmallNet_bask_pyr_GABA_syn[157], 0.0, 0.0, 1.0))")
# Connection 158: 7, seg 0 (0.067736) -> 22, seg 6 (0.277812)
h("a_pyramidals_48[22].basal0 syn_SmallNet_bask_pyr_GABA_syn[158] = new GABA_syn(0.277812)")
h("a_baskets_12[7].soma a_pyramidals_48[22].synlist.append(new NetCon(&v(0.06773561), syn_SmallNet_bask_pyr_GABA_syn[158], 0.0, 0.0, 1.0))")
# Connection 159: 7, seg 0 (0.592228) -> 22, seg 6 (0.552372)
h("a_pyramidals_48[22].basal0 syn_SmallNet_bask_pyr_GABA_syn[159] = new GABA_syn(0.552372)")
h("a_baskets_12[7].soma a_pyramidals_48[22].synlist.append(new NetCon(&v(0.59222776), syn_SmallNet_bask_pyr_GABA_syn[159], 0.0, 0.0, 1.0))")
# Connection 160: 3, seg 0 (0.202644) -> 22, seg 6 (0.306237)
h("a_pyramidals_48[22].basal0 syn_SmallNet_bask_pyr_GABA_syn[160] = new GABA_syn(0.306237)")
h("a_baskets_12[3].soma a_pyramidals_48[22].synlist.append(new NetCon(&v(0.20264441), syn_SmallNet_bask_pyr_GABA_syn[160], 0.0, 0.0, 1.0))")
# Connection 161: 1, seg 0 (0.558135) -> 23, seg 6 (0.737413)
h("a_pyramidals_48[23].basal0 syn_SmallNet_bask_pyr_GABA_syn[161] = new GABA_syn(0.737413)")
h("a_baskets_12[1].soma a_pyramidals_48[23].synlist.append(new NetCon(&v(0.5581346), syn_SmallNet_bask_pyr_GABA_syn[161], 0.0, 0.0, 1.0))")
# Connection 162: 8, seg 0 (0.954807) -> 23, seg 6 (0.348410)
h("a_pyramidals_48[23].basal0 syn_SmallNet_bask_pyr_GABA_syn[162] = new GABA_syn(0.348410)")
h("a_baskets_12[8].soma a_pyramidals_48[23].synlist.append(new NetCon(&v(0.954807), syn_SmallNet_bask_pyr_GABA_syn[162], 0.0, 0.0, 1.0))")
# Connection 163: 9, seg 0 (0.281111) -> 23, seg 6 (0.032505)
h("a_pyramidals_48[23].basal0 syn_SmallNet_bask_pyr_GABA_syn[163] = new GABA_syn(0.032505)")
h("a_baskets_12[9].soma a_pyramidals_48[23].synlist.append(new NetCon(&v(0.28111118), syn_SmallNet_bask_pyr_GABA_syn[163], 0.0, 0.0, 1.0))")
# Connection 164: 5, seg 0 (0.921466) -> 23, seg 6 (0.007102)
h("a_pyramidals_48[23].basal0 syn_SmallNet_bask_pyr_GABA_syn[164] = new GABA_syn(0.007102)")
h("a_baskets_12[5].soma a_pyramidals_48[23].synlist.append(new NetCon(&v(0.9214659), syn_SmallNet_bask_pyr_GABA_syn[164], 0.0, 0.0, 1.0))")
# Connection 165: 9, seg 0 (0.047955) -> 23, seg 6 (0.044126)
h("a_pyramidals_48[23].basal0 syn_SmallNet_bask_pyr_GABA_syn[165] = new GABA_syn(0.044126)")
h("a_baskets_12[9].soma a_pyramidals_48[23].synlist.append(new NetCon(&v(0.047954977), syn_SmallNet_bask_pyr_GABA_syn[165], 0.0, 0.0, 1.0))")
# Connection 166: 11, seg 0 (0.199631) -> 23, seg 6 (0.622498)
h("a_pyramidals_48[23].basal0 syn_SmallNet_bask_pyr_GABA_syn[166] = new GABA_syn(0.622498)")
h("a_baskets_12[11].soma a_pyramidals_48[23].synlist.append(new NetCon(&v(0.19963074), syn_SmallNet_bask_pyr_GABA_syn[166], 0.0, 0.0, 1.0))")
# Connection 167: 6, seg 0 (0.998752) -> 23, seg 6 (0.231140)
h("a_pyramidals_48[23].basal0 syn_SmallNet_bask_pyr_GABA_syn[167] = new GABA_syn(0.231140)")
h("a_baskets_12[6].soma a_pyramidals_48[23].synlist.append(new NetCon(&v(0.9987523), syn_SmallNet_bask_pyr_GABA_syn[167], 0.0, 0.0, 1.0))")
# Connection 168: 5, seg 0 (0.205245) -> 24, seg 6 (0.002601)
h("a_pyramidals_48[24].basal0 syn_SmallNet_bask_pyr_GABA_syn[168] = new GABA_syn(0.002601)")
h("a_baskets_12[5].soma a_pyramidals_48[24].synlist.append(new NetCon(&v(0.20524496), syn_SmallNet_bask_pyr_GABA_syn[168], 0.0, 0.0, 1.0))")
# Connection 169: 10, seg 0 (0.233157) -> 24, seg 6 (0.304517)
h("a_pyramidals_48[24].basal0 syn_SmallNet_bask_pyr_GABA_syn[169] = new GABA_syn(0.304517)")
h("a_baskets_12[10].soma a_pyramidals_48[24].synlist.append(new NetCon(&v(0.23315698), syn_SmallNet_bask_pyr_GABA_syn[169], 0.0, 0.0, 1.0))")
# Connection 170: 4, seg 0 (0.944318) -> 24, seg 6 (0.824581)
h("a_pyramidals_48[24].basal0 syn_SmallNet_bask_pyr_GABA_syn[170] = new GABA_syn(0.824581)")
h("a_baskets_12[4].soma a_pyramidals_48[24].synlist.append(new NetCon(&v(0.9443184), syn_SmallNet_bask_pyr_GABA_syn[170], 0.0, 0.0, 1.0))")
# Connection 171: 2, seg 0 (0.534032) -> 24, seg 6 (0.683173)
h("a_pyramidals_48[24].basal0 syn_SmallNet_bask_pyr_GABA_syn[171] = new GABA_syn(0.683173)")
h("a_baskets_12[2].soma a_pyramidals_48[24].synlist.append(new NetCon(&v(0.53403205), syn_SmallNet_bask_pyr_GABA_syn[171], 0.0, 0.0, 1.0))")
# Connection 172: 10, seg 0 (0.884406) -> 24, seg 6 (0.591072)
h("a_pyramidals_48[24].basal0 syn_SmallNet_bask_pyr_GABA_syn[172] = new GABA_syn(0.591072)")
h("a_baskets_12[10].soma a_pyramidals_48[24].synlist.append(new NetCon(&v(0.8844063), syn_SmallNet_bask_pyr_GABA_syn[172], 0.0, 0.0, 1.0))")
# Connection 173: 0, seg 0 (0.883653) -> 24, seg 6 (0.897702)
h("a_pyramidals_48[24].basal0 syn_SmallNet_bask_pyr_GABA_syn[173] = new GABA_syn(0.897702)")
h("a_baskets_12[0].soma a_pyramidals_48[24].synlist.append(new NetCon(&v(0.8836534), syn_SmallNet_bask_pyr_GABA_syn[173], 0.0, 0.0, 1.0))")
# Connection 174: 11, seg 0 (0.572778) -> 24, seg 6 (0.773243)
h("a_pyramidals_48[24].basal0 syn_SmallNet_bask_pyr_GABA_syn[174] = new GABA_syn(0.773243)")
h("a_baskets_12[11].soma a_pyramidals_48[24].synlist.append(new NetCon(&v(0.5727781), syn_SmallNet_bask_pyr_GABA_syn[174], 0.0, 0.0, 1.0))")
# Connection 175: 6, seg 0 (0.544422) -> 25, seg 6 (0.748515)
h("a_pyramidals_48[25].basal0 syn_SmallNet_bask_pyr_GABA_syn[175] = new GABA_syn(0.748515)")
h("a_baskets_12[6].soma a_pyramidals_48[25].synlist.append(new NetCon(&v(0.5444217), syn_SmallNet_bask_pyr_GABA_syn[175], 0.0, 0.0, 1.0))")
# Connection 176: 0, seg 0 (0.298995) -> 25, seg 6 (0.315899)
h("a_pyramidals_48[25].basal0 syn_SmallNet_bask_pyr_GABA_syn[176] = new GABA_syn(0.315899)")
h("a_baskets_12[0].soma a_pyramidals_48[25].synlist.append(new NetCon(&v(0.29899544), syn_SmallNet_bask_pyr_GABA_syn[176], 0.0, 0.0, 1.0))")
# Connection 177: 10, seg 0 (0.022965) -> 25, seg 6 (0.034504)
h("a_pyramidals_48[25].basal0 syn_SmallNet_bask_pyr_GABA_syn[177] = new GABA_syn(0.034504)")
h("a_baskets_12[10].soma a_pyramidals_48[25].synlist.append(new NetCon(&v(0.02296549), syn_SmallNet_bask_pyr_GABA_syn[177], 0.0, 0.0, 1.0))")
# Connection 178: 8, seg 0 (0.142681) -> 25, seg 6 (0.739770)
h("a_pyramidals_48[25].basal0 syn_SmallNet_bask_pyr_GABA_syn[178] = new GABA_syn(0.739770)")
h("a_baskets_12[8].soma a_pyramidals_48[25].synlist.append(new NetCon(&v(0.14268136), syn_SmallNet_bask_pyr_GABA_syn[178], 0.0, 0.0, 1.0))")
# Connection 179: 1, seg 0 (0.651389) -> 25, seg 6 (0.274489)
h("a_pyramidals_48[25].basal0 syn_SmallNet_bask_pyr_GABA_syn[179] = new GABA_syn(0.274489)")
h("a_baskets_12[1].soma a_pyramidals_48[25].synlist.append(new NetCon(&v(0.6513891), syn_SmallNet_bask_pyr_GABA_syn[179], 0.0, 0.0, 1.0))")
# Connection 180: 7, seg 0 (0.289481) -> 25, seg 6 (0.421406)
h("a_pyramidals_48[25].basal0 syn_SmallNet_bask_pyr_GABA_syn[180] = new GABA_syn(0.421406)")
h("a_baskets_12[7].soma a_pyramidals_48[25].synlist.append(new NetCon(&v(0.2894811), syn_SmallNet_bask_pyr_GABA_syn[180], 0.0, 0.0, 1.0))")
# Connection 181: 8, seg 0 (0.723762) -> 25, seg 6 (0.950310)
h("a_pyramidals_48[25].basal0 syn_SmallNet_bask_pyr_GABA_syn[181] = new GABA_syn(0.950310)")
h("a_baskets_12[8].soma a_pyramidals_48[25].synlist.append(new NetCon(&v(0.72376156), syn_SmallNet_bask_pyr_GABA_syn[181], 0.0, 0.0, 1.0))")
# Connection 182: 7, seg 0 (0.859575) -> 26, seg 6 (0.176956)
h("a_pyramidals_48[26].basal0 syn_SmallNet_bask_pyr_GABA_syn[182] = new GABA_syn(0.176956)")
h("a_baskets_12[7].soma a_pyramidals_48[26].synlist.append(new NetCon(&v(0.85957533), syn_SmallNet_bask_pyr_GABA_syn[182], 0.0, 0.0, 1.0))")
# Connection 183: 2, seg 0 (0.249032) -> 26, seg 6 (0.168488)
h("a_pyramidals_48[26].basal0 syn_SmallNet_bask_pyr_GABA_syn[183] = new GABA_syn(0.168488)")
h("a_baskets_12[2].soma a_pyramidals_48[26].synlist.append(new NetCon(&v(0.24903244), syn_SmallNet_bask_pyr_GABA_syn[183], 0.0, 0.0, 1.0))")
# Connection 184: 9, seg 0 (0.427851) -> 26, seg 6 (0.525832)
h("a_pyramidals_48[26].basal0 syn_SmallNet_bask_pyr_GABA_syn[184] = new GABA_syn(0.525832)")
h("a_baskets_12[9].soma a_pyramidals_48[26].synlist.append(new NetCon(&v(0.42785096), syn_SmallNet_bask_pyr_GABA_syn[184], 0.0, 0.0, 1.0))")
# Connection 185: 8, seg 0 (0.457960) -> 26, seg 6 (0.324237)
h("a_pyramidals_48[26].basal0 syn_SmallNet_bask_pyr_GABA_syn[185] = new GABA_syn(0.324237)")
h("a_baskets_12[8].soma a_pyramidals_48[26].synlist.append(new NetCon(&v(0.45796), syn_SmallNet_bask_pyr_GABA_syn[185], 0.0, 0.0, 1.0))")
# Connection 186: 11, seg 0 (0.684277) -> 26, seg 6 (0.082128)
h("a_pyramidals_48[26].basal0 syn_SmallNet_bask_pyr_GABA_syn[186] = new GABA_syn(0.082128)")
h("a_baskets_12[11].soma a_pyramidals_48[26].synlist.append(new NetCon(&v(0.6842771), syn_SmallNet_bask_pyr_GABA_syn[186], 0.0, 0.0, 1.0))")
# Connection 187: 8, seg 0 (0.413210) -> 26, seg 6 (0.262709)
h("a_pyramidals_48[26].basal0 syn_SmallNet_bask_pyr_GABA_syn[187] = new GABA_syn(0.262709)")
h("a_baskets_12[8].soma a_pyramidals_48[26].synlist.append(new NetCon(&v(0.4132104), syn_SmallNet_bask_pyr_GABA_syn[187], 0.0, 0.0, 1.0))")
# Connection 188: 5, seg 0 (0.670862) -> 26, seg 6 (0.090325)
h("a_pyramidals_48[26].basal0 syn_SmallNet_bask_pyr_GABA_syn[188] = new GABA_syn(0.090325)")
h("a_baskets_12[5].soma a_pyramidals_48[26].synlist.append(new NetCon(&v(0.67086184), syn_SmallNet_bask_pyr_GABA_syn[188], 0.0, 0.0, 1.0))")
# Connection 189: 9, seg 0 (0.178680) -> 27, seg 6 (0.083302)
h("a_pyramidals_48[27].basal0 syn_SmallNet_bask_pyr_GABA_syn[189] = new GABA_syn(0.083302)")
h("a_baskets_12[9].soma a_pyramidals_48[27].synlist.append(new NetCon(&v(0.17867965), syn_SmallNet_bask_pyr_GABA_syn[189], 0.0, 0.0, 1.0))")
# Connection 190: 6, seg 0 (0.061757) -> 27, seg 6 (0.951204)
h("a_pyramidals_48[27].basal0 syn_SmallNet_bask_pyr_GABA_syn[190] = new GABA_syn(0.951204)")
h("a_baskets_12[6].soma a_pyramidals_48[27].synlist.append(new NetCon(&v(0.06175655), syn_SmallNet_bask_pyr_GABA_syn[190], 0.0, 0.0, 1.0))")
# Connection 191: 6, seg 0 (0.452912) -> 27, seg 6 (0.539085)
h("a_pyramidals_48[27].basal0 syn_SmallNet_bask_pyr_GABA_syn[191] = new GABA_syn(0.539085)")
h("a_baskets_12[6].soma a_pyramidals_48[27].synlist.append(new NetCon(&v(0.45291203), syn_SmallNet_bask_pyr_GABA_syn[191], 0.0, 0.0, 1.0))")
# Connection 192: 9, seg 0 (0.689391) -> 27, seg 6 (0.921523)
h("a_pyramidals_48[27].basal0 syn_SmallNet_bask_pyr_GABA_syn[192] = new GABA_syn(0.921523)")
h("a_baskets_12[9].soma a_pyramidals_48[27].synlist.append(new NetCon(&v(0.6893911), syn_SmallNet_bask_pyr_GABA_syn[192], 0.0, 0.0, 1.0))")
# Connection 193: 1, seg 0 (0.862852) -> 27, seg 6 (0.860429)
h("a_pyramidals_48[27].basal0 syn_SmallNet_bask_pyr_GABA_syn[193] = new GABA_syn(0.860429)")
h("a_baskets_12[1].soma a_pyramidals_48[27].synlist.append(new NetCon(&v(0.8628518), syn_SmallNet_bask_pyr_GABA_syn[193], 0.0, 0.0, 1.0))")
# Connection 194: 7, seg 0 (0.794172) -> 27, seg 6 (0.115295)
h("a_pyramidals_48[27].basal0 syn_SmallNet_bask_pyr_GABA_syn[194] = new GABA_syn(0.115295)")
h("a_baskets_12[7].soma a_pyramidals_48[27].synlist.append(new NetCon(&v(0.7941718), syn_SmallNet_bask_pyr_GABA_syn[194], 0.0, 0.0, 1.0))")
# Connection 195: 3, seg 0 (0.355187) -> 27, seg 6 (0.613528)
h("a_pyramidals_48[27].basal0 syn_SmallNet_bask_pyr_GABA_syn[195] = new GABA_syn(0.613528)")
h("a_baskets_12[3].soma a_pyramidals_48[27].synlist.append(new NetCon(&v(0.35518742), syn_SmallNet_bask_pyr_GABA_syn[195], 0.0, 0.0, 1.0))")
# Connection 196: 8, seg 0 (0.113192) -> 28, seg 6 (0.984760)
h("a_pyramidals_48[28].basal0 syn_SmallNet_bask_pyr_GABA_syn[196] = new GABA_syn(0.984760)")
h("a_baskets_12[8].soma a_pyramidals_48[28].synlist.append(new NetCon(&v(0.11319184), syn_SmallNet_bask_pyr_GABA_syn[196], 0.0, 0.0, 1.0))")
# Connection 197: 0, seg 0 (0.284039) -> 28, seg 6 (0.347163)
h("a_pyramidals_48[28].basal0 syn_SmallNet_bask_pyr_GABA_syn[197] = new GABA_syn(0.347163)")
h("a_baskets_12[0].soma a_pyramidals_48[28].synlist.append(new NetCon(&v(0.2840386), syn_SmallNet_bask_pyr_GABA_syn[197], 0.0, 0.0, 1.0))")
# Connection 198: 11, seg 0 (0.597295) -> 28, seg 6 (0.795608)
h("a_pyramidals_48[28].basal0 syn_SmallNet_bask_pyr_GABA_syn[198] = new GABA_syn(0.795608)")
h("a_baskets_12[11].soma a_pyramidals_48[28].synlist.append(new NetCon(&v(0.5972948), syn_SmallNet_bask_pyr_GABA_syn[198], 0.0, 0.0, 1.0))")
# Connection 199: 6, seg 0 (0.035170) -> 28, seg 6 (0.050588)
h("a_pyramidals_48[28].basal0 syn_SmallNet_bask_pyr_GABA_syn[199] = new GABA_syn(0.050588)")
h("a_baskets_12[6].soma a_pyramidals_48[28].synlist.append(new NetCon(&v(0.03516972), syn_SmallNet_bask_pyr_GABA_syn[199], 0.0, 0.0, 1.0))")
# Connection 200: 5, seg 0 (0.415893) -> 28, seg 6 (0.253465)
h("a_pyramidals_48[28].basal0 syn_SmallNet_bask_pyr_GABA_syn[200] = new GABA_syn(0.253465)")
h("a_baskets_12[5].soma a_pyramidals_48[28].synlist.append(new NetCon(&v(0.41589284), syn_SmallNet_bask_pyr_GABA_syn[200], 0.0, 0.0, 1.0))")
# Connection 201: 1, seg 0 (0.546374) -> 28, seg 6 (0.742045)
h("a_pyramidals_48[28].basal0 syn_SmallNet_bask_pyr_GABA_syn[201] = new GABA_syn(0.742045)")
h("a_baskets_12[1].soma a_pyramidals_48[28].synlist.append(new NetCon(&v(0.54637355), syn_SmallNet_bask_pyr_GABA_syn[201], 0.0, 0.0, 1.0))")
# Connection 202: 1, seg 0 (0.769860) -> 28, seg 6 (0.267712)
h("a_pyramidals_48[28].basal0 syn_SmallNet_bask_pyr_GABA_syn[202] = new GABA_syn(0.267712)")
h("a_baskets_12[1].soma a_pyramidals_48[28].synlist.append(new NetCon(&v(0.76986027), syn_SmallNet_bask_pyr_GABA_syn[202], 0.0, 0.0, 1.0))")
# Connection 203: 3, seg 0 (0.293428) -> 29, seg 6 (0.710812)
h("a_pyramidals_48[29].basal0 syn_SmallNet_bask_pyr_GABA_syn[203] = new GABA_syn(0.710812)")
h("a_baskets_12[3].soma a_pyramidals_48[29].synlist.append(new NetCon(&v(0.29342848), syn_SmallNet_bask_pyr_GABA_syn[203], 0.0, 0.0, 1.0))")
# Connection 204: 10, seg 0 (0.909594) -> 29, seg 6 (0.565764)
h("a_pyramidals_48[29].basal0 syn_SmallNet_bask_pyr_GABA_syn[204] = new GABA_syn(0.565764)")
h("a_baskets_12[10].soma a_pyramidals_48[29].synlist.append(new NetCon(&v(0.9095945), syn_SmallNet_bask_pyr_GABA_syn[204], 0.0, 0.0, 1.0))")
# Connection 205: 10, seg 0 (0.733374) -> 29, seg 6 (0.616470)
h("a_pyramidals_48[29].basal0 syn_SmallNet_bask_pyr_GABA_syn[205] = new GABA_syn(0.616470)")
h("a_baskets_12[10].soma a_pyramidals_48[29].synlist.append(new NetCon(&v(0.7333738), syn_SmallNet_bask_pyr_GABA_syn[205], 0.0, 0.0, 1.0))")
# Connection 206: 9, seg 0 (0.357856) -> 29, seg 6 (0.378890)
h("a_pyramidals_48[29].basal0 syn_SmallNet_bask_pyr_GABA_syn[206] = new GABA_syn(0.378890)")
h("a_baskets_12[9].soma a_pyramidals_48[29].synlist.append(new NetCon(&v(0.3578564), syn_SmallNet_bask_pyr_GABA_syn[206], 0.0, 0.0, 1.0))")
# Connection 207: 5, seg 0 (0.318369) -> 29, seg 6 (0.713468)
h("a_pyramidals_48[29].basal0 syn_SmallNet_bask_pyr_GABA_syn[207] = new GABA_syn(0.713468)")
h("a_baskets_12[5].soma a_pyramidals_48[29].synlist.append(new NetCon(&v(0.31836945), syn_SmallNet_bask_pyr_GABA_syn[207], 0.0, 0.0, 1.0))")
# Connection 208: 1, seg 0 (0.502559) -> 29, seg 6 (0.498475)
h("a_pyramidals_48[29].basal0 syn_SmallNet_bask_pyr_GABA_syn[208] = new GABA_syn(0.498475)")
h("a_baskets_12[1].soma a_pyramidals_48[29].synlist.append(new NetCon(&v(0.5025592), syn_SmallNet_bask_pyr_GABA_syn[208], 0.0, 0.0, 1.0))")
# Connection 209: 5, seg 0 (0.609867) -> 29, seg 6 (0.271293)
h("a_pyramidals_48[29].basal0 syn_SmallNet_bask_pyr_GABA_syn[209] = new GABA_syn(0.271293)")
h("a_baskets_12[5].soma a_pyramidals_48[29].synlist.append(new NetCon(&v(0.6098673), syn_SmallNet_bask_pyr_GABA_syn[209], 0.0, 0.0, 1.0))")
# Connection 210: 4, seg 0 (0.945813) -> 30, seg 6 (0.332769)
h("a_pyramidals_48[30].basal0 syn_SmallNet_bask_pyr_GABA_syn[210] = new GABA_syn(0.332769)")
h("a_baskets_12[4].soma a_pyramidals_48[30].synlist.append(new NetCon(&v(0.9458131), syn_SmallNet_bask_pyr_GABA_syn[210], 0.0, 0.0, 1.0))")
# Connection 211: 0, seg 0 (0.550266) -> 30, seg 6 (0.438771)
h("a_pyramidals_48[30].basal0 syn_SmallNet_bask_pyr_GABA_syn[211] = new GABA_syn(0.438771)")
h("a_baskets_12[0].soma a_pyramidals_48[30].synlist.append(new NetCon(&v(0.55026615), syn_SmallNet_bask_pyr_GABA_syn[211], 0.0, 0.0, 1.0))")
# Connection 212: 1, seg 0 (0.349817) -> 30, seg 6 (0.958367)
h("a_pyramidals_48[30].basal0 syn_SmallNet_bask_pyr_GABA_syn[212] = new GABA_syn(0.958367)")
h("a_baskets_12[1].soma a_pyramidals_48[30].synlist.append(new NetCon(&v(0.34981722), syn_SmallNet_bask_pyr_GABA_syn[212], 0.0, 0.0, 1.0))")
# Connection 213: 6, seg 0 (0.970994) -> 30, seg 6 (0.056052)
h("a_pyramidals_48[30].basal0 syn_SmallNet_bask_pyr_GABA_syn[213] = new GABA_syn(0.056052)")
h("a_baskets_12[6].soma a_pyramidals_48[30].synlist.append(new NetCon(&v(0.9709941), syn_SmallNet_bask_pyr_GABA_syn[213], 0.0, 0.0, 1.0))")
# Connection 214: 2, seg 0 (0.786947) -> 30, seg 6 (0.148564)
h("a_pyramidals_48[30].basal0 syn_SmallNet_bask_pyr_GABA_syn[214] = new GABA_syn(0.148564)")
h("a_baskets_12[2].soma a_pyramidals_48[30].synlist.append(new NetCon(&v(0.78694695), syn_SmallNet_bask_pyr_GABA_syn[214], 0.0, 0.0, 1.0))")
# Connection 215: 11, seg 0 (0.578783) -> 30, seg 6 (0.180171)
h("a_pyramidals_48[30].basal0 syn_SmallNet_bask_pyr_GABA_syn[215] = new GABA_syn(0.180171)")
h("a_baskets_12[11].soma a_pyramidals_48[30].synlist.append(new NetCon(&v(0.57878256), syn_SmallNet_bask_pyr_GABA_syn[215], 0.0, 0.0, 1.0))")
# Connection 216: 11, seg 0 (0.069420) -> 30, seg 6 (0.496234)
h("a_pyramidals_48[30].basal0 syn_SmallNet_bask_pyr_GABA_syn[216] = new GABA_syn(0.496234)")
h("a_baskets_12[11].soma a_pyramidals_48[30].synlist.append(new NetCon(&v(0.06941956), syn_SmallNet_bask_pyr_GABA_syn[216], 0.0, 0.0, 1.0))")
# Connection 217: 0, seg 0 (0.128139) -> 31, seg 6 (0.981879)
h("a_pyramidals_48[31].basal0 syn_SmallNet_bask_pyr_GABA_syn[217] = new GABA_syn(0.981879)")
h("a_baskets_12[0].soma a_pyramidals_48[31].synlist.append(new NetCon(&v(0.12813878), syn_SmallNet_bask_pyr_GABA_syn[217], 0.0, 0.0, 1.0))")
# Connection 218: 8, seg 0 (0.972708) -> 31, seg 6 (0.757359)
h("a_pyramidals_48[31].basal0 syn_SmallNet_bask_pyr_GABA_syn[218] = new GABA_syn(0.757359)")
h("a_baskets_12[8].soma a_pyramidals_48[31].synlist.append(new NetCon(&v(0.972708), syn_SmallNet_bask_pyr_GABA_syn[218], 0.0, 0.0, 1.0))")
# Connection 219: 3, seg 0 (0.294907) -> 31, seg 6 (0.894245)
h("a_pyramidals_48[31].basal0 syn_SmallNet_bask_pyr_GABA_syn[219] = new GABA_syn(0.894245)")
h("a_baskets_12[3].soma a_pyramidals_48[31].synlist.append(new NetCon(&v(0.29490733), syn_SmallNet_bask_pyr_GABA_syn[219], 0.0, 0.0, 1.0))")
# Connection 220: 3, seg 0 (0.752860) -> 31, seg 6 (0.596460)
h("a_pyramidals_48[31].basal0 syn_SmallNet_bask_pyr_GABA_syn[220] = new GABA_syn(0.596460)")
h("a_baskets_12[3].soma a_pyramidals_48[31].synlist.append(new NetCon(&v(0.75286037), syn_SmallNet_bask_pyr_GABA_syn[220], 0.0, 0.0, 1.0))")
# Connection 221: 10, seg 0 (0.436033) -> 31, seg 6 (0.139410)
h("a_pyramidals_48[31].basal0 syn_SmallNet_bask_pyr_GABA_syn[221] = new GABA_syn(0.139410)")
h("a_baskets_12[10].soma a_pyramidals_48[31].synlist.append(new NetCon(&v(0.43603307), syn_SmallNet_bask_pyr_GABA_syn[221], 0.0, 0.0, 1.0))")
# Connection 222: 10, seg 0 (0.115804) -> 31, seg 6 (0.038587)
h("a_pyramidals_48[31].basal0 syn_SmallNet_bask_pyr_GABA_syn[222] = new GABA_syn(0.038587)")
h("a_baskets_12[10].soma a_pyramidals_48[31].synlist.append(new NetCon(&v(0.11580366), syn_SmallNet_bask_pyr_GABA_syn[222], 0.0, 0.0, 1.0))")
# Connection 223: 0, seg 0 (0.812825) -> 31, seg 6 (0.960815)
# Remaining basket -> pyramidal GABA connections (synapse indices 223-335) of the
# SmallNet_bask_pyr projection, expressed as a data table plus one loop instead of
# a literal pair of hoc calls per connection.
#
# Each row: (synapse index, source basket cell, source soma fraction,
#            target pyramidal cell, target basal0 fraction).
# The fractions are kept as strings so the hoc statements emitted below are
# byte-identical to the original generated code (exact literal precision is
# preserved, including trailing zeros).
_bask_pyr_gaba_conns = [
    (223, 0, "0.8128254", 31, "0.960815"),
    (224, 7, "0.9350227", 32, "0.317733"),
    (225, 0, "0.3358475", 32, "0.091403"),
    (226, 2, "0.002010703", 32, "0.684074"),
    (227, 9, "0.25353402", 32, "0.858540"),
    (228, 0, "0.04691708", 32, "0.453456"),
    (229, 11, "0.36292326", 32, "0.250506"),
    (230, 7, "0.57475275", 32, "0.047337"),
    (231, 7, "0.6104809", 33, "0.252653"),
    (232, 9, "0.6153902", 33, "0.746431"),
    (233, 10, "0.05133581", 33, "0.776664"),
    (234, 1, "0.719114", 33, "0.768757"),
    (235, 3, "0.8530328", 33, "0.674951"),
    (236, 5, "0.7316759", 33, "0.894318"),
    (237, 3, "0.87654656", 33, "0.283782"),
    (238, 9, "0.6721639", 34, "0.124130"),
    (239, 0, "0.6554494", 34, "0.916561"),
    (240, 11, "0.6408048", 34, "0.992249"),
    (241, 6, "0.88718355", 34, "0.476303"),
    (242, 1, "0.6145298", 34, "0.620298"),
    (243, 1, "0.6408524", 34, "0.538370"),
    (244, 1, "0.93401664", 34, "0.408231"),
    (245, 1, "0.6720668", 35, "0.134870"),
    (246, 6, "0.5012989", 35, "0.237055"),
    (247, 4, "0.50946826", 35, "0.791108"),
    (248, 3, "0.90585613", 35, "0.716458"),
    (249, 1, "0.8329058", 35, "0.824649"),
    (250, 6, "0.42266965", 35, "0.003507"),
    (251, 8, "0.5705579", 35, "0.407547"),
    (252, 10, "0.48785317", 36, "0.423515"),
    (253, 10, "0.33996302", 36, "0.545671"),
    (254, 5, "0.49726492", 36, "0.898198"),
    (255, 8, "0.94468653", 36, "0.535666"),
    (256, 2, "0.53476846", 36, "0.622339"),
    (257, 5, "0.55009663", 36, "0.813767"),
    (258, 8, "0.29153728", 36, "0.708604"),
    (259, 8, "0.45847178", 37, "0.798385"),
    (260, 0, "0.5323577", 37, "0.067570"),
    (261, 2, "0.38603616", 37, "0.432587"),
    (262, 10, "0.12194735", 37, "0.794440"),
    (263, 7, "0.16317493", 37, "0.538829"),
    (264, 0, "0.3543886", 37, "0.261961"),
    (265, 9, "0.19395155", 37, "0.367663"),
    (266, 3, "0.97419643", 38, "0.222306"),
    (267, 7, "0.6013603", 38, "0.608262"),
    (268, 11, "0.030022442", 38, "0.114480"),
    (269, 8, "0.2041719", 38, "0.585428"),
    (270, 10, "0.604211", 38, "0.512805"),
    (271, 5, "0.78659755", 38, "0.116796"),
    (272, 5, "0.5213106", 38, "0.323704"),
    (273, 11, "0.76748747", 39, "0.693204"),
    (274, 7, "0.1541661", 39, "0.035681"),
    (275, 0, "0.19844651", 39, "0.856734"),
    (276, 11, "0.8569969", 39, "0.461560"),
    (277, 3, "0.68639916", 39, "0.477086"),
    (278, 2, "0.10582209", 39, "0.658899"),
    (279, 1, "0.5117668", 39, "0.258686"),
    (280, 5, "0.14746886", 40, "0.498140"),
    (281, 1, "0.925909", 40, "0.546794"),
    (282, 8, "0.9319342", 40, "0.592890"),
    (283, 10, "0.84309924", 40, "0.807049"),
    (284, 7, "0.71286017", 40, "0.293512"),
    (285, 2, "0.8002921", 40, "0.870343"),
    (286, 0, "0.52045894", 40, "0.945339"),
    (287, 8, "0.80924755", 41, "0.551661"),
    (288, 9, "0.9101289", 41, "0.071675"),
    (289, 5, "0.778454", 41, "0.923958"),
    (290, 1, "0.50857", 41, "0.522640"),
    (291, 3, "0.94036925", 41, "0.355798"),
    (292, 4, "0.03546846", 41, "0.628971"),
    (293, 6, "0.3280204", 41, "0.948228"),
    (294, 11, "0.54384124", 42, "0.074087"),
    (295, 8, "0.99237394", 42, "0.638499"),
    (296, 11, "0.617561", 42, "0.667823"),
    (297, 2, "0.5836846", 42, "0.640924"),
    (298, 0, "0.20495468", 42, "0.900590"),
    (299, 2, "0.46611398", 42, "0.844572"),
    (300, 4, "0.3887415", 42, "0.199132"),
    (301, 3, "0.04436207", 43, "0.828116"),
    (302, 2, "0.6774237", 43, "0.452479"),
    (303, 3, "0.4029848", 43, "0.623731"),
    (304, 9, "0.5307796", 43, "0.045277"),
    (305, 1, "0.30157137", 43, "0.832962"),
    (306, 10, "0.15143788", 43, "0.866191"),
    (307, 10, "0.011770546", 43, "0.557898"),
    (308, 8, "0.4172306", 44, "0.581755"),
    (309, 4, "0.9127492", 44, "0.547306"),
    (310, 0, "0.44626313", 44, "0.967267"),
    (311, 2, "0.34788138", 44, "0.251653"),
    (312, 8, "0.91458523", 44, "0.569918"),
    (313, 1, "0.7518518", 44, "0.543621"),
    (314, 2, "0.27276874", 44, "0.961339"),
    (315, 6, "0.55591035", 45, "0.125805"),
    (316, 4, "0.4310354", 45, "0.206251"),
    (317, 8, "0.39892495", 45, "0.929112"),
    (318, 3, "0.323582", 45, "0.194337"),
    (319, 7, "0.77645487", 45, "0.430553"),
    (320, 5, "0.8077465", 45, "0.904698"),
    (321, 0, "0.633028", 45, "0.322225"),
    (322, 2, "0.57693636", 46, "0.070946"),
    (323, 7, "0.8695265", 46, "0.847209"),
    (324, 0, "0.99811465", 46, "0.256664"),
    (325, 10, "0.7410501", 46, "0.153532"),
    (326, 8, "0.6602155", 46, "0.543824"),
    (327, 8, "0.5151271", 46, "0.772163"),
    (328, 2, "0.36251134", 46, "0.606803"),
    (329, 0, "0.87999946", 47, "0.083833"),
    (330, 11, "0.23409665", 47, "0.826980"),
    (331, 1, "0.8965758", 47, "0.591950"),
    (332, 5, "0.5782621", 47, "0.629266"),
    (333, 6, "0.97310203", 47, "0.365342"),
    (334, 3, "0.31654412", 47, "0.931175"),
    (335, 2, "0.1681934", 47, "0.253872"),
]
for _idx, _src, _src_x, _tgt, _tgt_x in _bask_pyr_gaba_conns:
    # Instantiate the GABA_syn point process on the target pyramidal cell's
    # basal0 section at fraction _tgt_x.
    h("a_pyramidals_48[%d].basal0 syn_SmallNet_bask_pyr_GABA_syn[%d] = new GABA_syn(%s)"
      % (_tgt, _idx, _tgt_x))
    # Connect the source basket cell's soma voltage (monitored at fraction
    # _src_x) to that synapse; NetCon args after the target are threshold 0.0,
    # delay 0.0, weight 1.0 — TODO confirm arg order against the NetCon docs.
    h("a_baskets_12[%d].soma a_pyramidals_48[%d].synlist.append(new NetCon(&v(%s), syn_SmallNet_bask_pyr_GABA_syn[%d], 0.0, 0.0, 1.0))"
      % (_src, _tgt, _src_x, _idx))
# Adding projection: SmallNet_pyr_pyr, from pyramidals_48 to pyramidals_48 with synapse AMPA_syn, 336 connection(s)
h("objectvar syn_SmallNet_pyr_pyr_AMPA_syn[336]")
# First pyramidal -> pyramidal AMPA connections (synapse indices 0-8) of this
# projection, expressed as a data table plus one loop instead of a literal pair
# of hoc calls per connection.
#
# Each row: (synapse index, source pyramidal cell, source soma fraction,
#            target pyramidal cell, target apical3 fraction).
# Fractions are kept as strings so the emitted hoc statements are byte-identical
# to the original generated code.
_pyr_pyr_ampa_conns = [
    (0, 14, "0.5281431", 0, "0.107969"),
    (1, 11, "0.76487905", 0, "0.773212"),
    (2, 17, "0.5332893", 0, "0.144369"),
    (3, 30, "0.83340174", 0, "0.532087"),
    (4, 46, "0.5796207", 0, "0.957226"),
    (5, 33, "0.5969233", 0, "0.389855"),
    (6, 8, "0.35539436", 0, "0.436514"),
    (7, 7, "0.638013", 1, "0.081532"),
    (8, 6, "0.2711959", 1, "0.484467"),
]
for _idx, _src, _src_x, _tgt, _tgt_x in _pyr_pyr_ampa_conns:
    # Instantiate the AMPA_syn point process on the target pyramidal cell's
    # apical3 section at fraction _tgt_x.
    h("a_pyramidals_48[%d].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[%d] = new AMPA_syn(%s)"
      % (_tgt, _idx, _tgt_x))
    # Connect the source pyramidal cell's soma voltage (monitored at fraction
    # _src_x) to that synapse; trailing NetCon args are 0.0, 0.0, 1.0 —
    # presumably threshold, delay, weight; verify against the NetCon docs.
    h("a_pyramidals_48[%d].soma a_pyramidals_48[%d].synlist.append(new NetCon(&v(%s), syn_SmallNet_pyr_pyr_AMPA_syn[%d], 0.0, 0.0, 1.0))"
      % (_src, _tgt, _src_x, _idx))
# Connection 9: 26, seg 0 (0.426214) -> 1, seg 3 (0.593457)
h("a_pyramidals_48[1].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[9] = new AMPA_syn(0.593457)")
h("a_pyramidals_48[26].soma a_pyramidals_48[1].synlist.append(new NetCon(&v(0.4262135), syn_SmallNet_pyr_pyr_AMPA_syn[9], 0.0, 0.0, 1.0))")
# Connection 10: 22, seg 0 (0.503676) -> 1, seg 3 (0.968940)
h("a_pyramidals_48[1].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[10] = new AMPA_syn(0.968940)")
h("a_pyramidals_48[22].soma a_pyramidals_48[1].synlist.append(new NetCon(&v(0.50367624), syn_SmallNet_pyr_pyr_AMPA_syn[10], 0.0, 0.0, 1.0))")
# Connection 11: 12, seg 0 (0.427743) -> 1, seg 3 (0.865828)
h("a_pyramidals_48[1].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[11] = new AMPA_syn(0.865828)")
h("a_pyramidals_48[12].soma a_pyramidals_48[1].synlist.append(new NetCon(&v(0.42774254), syn_SmallNet_pyr_pyr_AMPA_syn[11], 0.0, 0.0, 1.0))")
# Connection 12: 10, seg 0 (0.126454) -> 1, seg 3 (0.713773)
h("a_pyramidals_48[1].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[12] = new AMPA_syn(0.713773)")
h("a_pyramidals_48[10].soma a_pyramidals_48[1].synlist.append(new NetCon(&v(0.126454), syn_SmallNet_pyr_pyr_AMPA_syn[12], 0.0, 0.0, 1.0))")
# Connection 13: 14, seg 0 (0.959457) -> 1, seg 3 (0.182878)
h("a_pyramidals_48[1].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[13] = new AMPA_syn(0.182878)")
h("a_pyramidals_48[14].soma a_pyramidals_48[1].synlist.append(new NetCon(&v(0.9594574), syn_SmallNet_pyr_pyr_AMPA_syn[13], 0.0, 0.0, 1.0))")
# Connection 14: 6, seg 0 (0.548644) -> 2, seg 3 (0.492154)
h("a_pyramidals_48[2].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[14] = new AMPA_syn(0.492154)")
h("a_pyramidals_48[6].soma a_pyramidals_48[2].synlist.append(new NetCon(&v(0.54864436), syn_SmallNet_pyr_pyr_AMPA_syn[14], 0.0, 0.0, 1.0))")
# Connection 15: 16, seg 0 (0.726368) -> 2, seg 3 (0.788630)
h("a_pyramidals_48[2].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[15] = new AMPA_syn(0.788630)")
h("a_pyramidals_48[16].soma a_pyramidals_48[2].synlist.append(new NetCon(&v(0.7263682), syn_SmallNet_pyr_pyr_AMPA_syn[15], 0.0, 0.0, 1.0))")
# Connection 16: 17, seg 0 (0.667373) -> 2, seg 3 (0.177637)
h("a_pyramidals_48[2].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[16] = new AMPA_syn(0.177637)")
h("a_pyramidals_48[17].soma a_pyramidals_48[2].synlist.append(new NetCon(&v(0.667373), syn_SmallNet_pyr_pyr_AMPA_syn[16], 0.0, 0.0, 1.0))")
# Connection 17: 42, seg 0 (0.286544) -> 2, seg 3 (0.228245)
h("a_pyramidals_48[2].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[17] = new AMPA_syn(0.228245)")
h("a_pyramidals_48[42].soma a_pyramidals_48[2].synlist.append(new NetCon(&v(0.28654385), syn_SmallNet_pyr_pyr_AMPA_syn[17], 0.0, 0.0, 1.0))")
# Connection 18: 26, seg 0 (0.259292) -> 2, seg 3 (0.788661)
h("a_pyramidals_48[2].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[18] = new AMPA_syn(0.788661)")
h("a_pyramidals_48[26].soma a_pyramidals_48[2].synlist.append(new NetCon(&v(0.25929207), syn_SmallNet_pyr_pyr_AMPA_syn[18], 0.0, 0.0, 1.0))")
# Connection 19: 27, seg 0 (0.032074) -> 2, seg 3 (0.872795)
h("a_pyramidals_48[2].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[19] = new AMPA_syn(0.872795)")
h("a_pyramidals_48[27].soma a_pyramidals_48[2].synlist.append(new NetCon(&v(0.032073736), syn_SmallNet_pyr_pyr_AMPA_syn[19], 0.0, 0.0, 1.0))")
# Connection 20: 18, seg 0 (0.069563) -> 2, seg 3 (0.969954)
h("a_pyramidals_48[2].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[20] = new AMPA_syn(0.969954)")
h("a_pyramidals_48[18].soma a_pyramidals_48[2].synlist.append(new NetCon(&v(0.06956321), syn_SmallNet_pyr_pyr_AMPA_syn[20], 0.0, 0.0, 1.0))")
# Connection 21: 19, seg 0 (0.680623) -> 3, seg 3 (0.202635)
h("a_pyramidals_48[3].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[21] = new AMPA_syn(0.202635)")
h("a_pyramidals_48[19].soma a_pyramidals_48[3].synlist.append(new NetCon(&v(0.68062264), syn_SmallNet_pyr_pyr_AMPA_syn[21], 0.0, 0.0, 1.0))")
# Connection 22: 18, seg 0 (0.194160) -> 3, seg 3 (0.359442)
h("a_pyramidals_48[3].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[22] = new AMPA_syn(0.359442)")
h("a_pyramidals_48[18].soma a_pyramidals_48[3].synlist.append(new NetCon(&v(0.1941601), syn_SmallNet_pyr_pyr_AMPA_syn[22], 0.0, 0.0, 1.0))")
# Connection 23: 47, seg 0 (0.945691) -> 3, seg 3 (0.609381)
h("a_pyramidals_48[3].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[23] = new AMPA_syn(0.609381)")
h("a_pyramidals_48[47].soma a_pyramidals_48[3].synlist.append(new NetCon(&v(0.94569117), syn_SmallNet_pyr_pyr_AMPA_syn[23], 0.0, 0.0, 1.0))")
# Connection 24: 43, seg 0 (0.122385) -> 3, seg 3 (0.575106)
h("a_pyramidals_48[3].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[24] = new AMPA_syn(0.575106)")
h("a_pyramidals_48[43].soma a_pyramidals_48[3].synlist.append(new NetCon(&v(0.12238532), syn_SmallNet_pyr_pyr_AMPA_syn[24], 0.0, 0.0, 1.0))")
# Connection 25: 32, seg 0 (0.461656) -> 3, seg 3 (0.979203)
h("a_pyramidals_48[3].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[25] = new AMPA_syn(0.979203)")
h("a_pyramidals_48[32].soma a_pyramidals_48[3].synlist.append(new NetCon(&v(0.46165633), syn_SmallNet_pyr_pyr_AMPA_syn[25], 0.0, 0.0, 1.0))")
# Connection 26: 10, seg 0 (0.144899) -> 3, seg 3 (0.720835)
h("a_pyramidals_48[3].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[26] = new AMPA_syn(0.720835)")
h("a_pyramidals_48[10].soma a_pyramidals_48[3].synlist.append(new NetCon(&v(0.14489925), syn_SmallNet_pyr_pyr_AMPA_syn[26], 0.0, 0.0, 1.0))")
# Connection 27: 8, seg 0 (0.281622) -> 3, seg 3 (0.749554)
h("a_pyramidals_48[3].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[27] = new AMPA_syn(0.749554)")
h("a_pyramidals_48[8].soma a_pyramidals_48[3].synlist.append(new NetCon(&v(0.28162223), syn_SmallNet_pyr_pyr_AMPA_syn[27], 0.0, 0.0, 1.0))")
# Connection 28: 13, seg 0 (0.802859) -> 4, seg 3 (0.517594)
h("a_pyramidals_48[4].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[28] = new AMPA_syn(0.517594)")
h("a_pyramidals_48[13].soma a_pyramidals_48[4].synlist.append(new NetCon(&v(0.80285877), syn_SmallNet_pyr_pyr_AMPA_syn[28], 0.0, 0.0, 1.0))")
# Connection 29: 27, seg 0 (0.787129) -> 4, seg 3 (0.404962)
h("a_pyramidals_48[4].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[29] = new AMPA_syn(0.404962)")
h("a_pyramidals_48[27].soma a_pyramidals_48[4].synlist.append(new NetCon(&v(0.78712946), syn_SmallNet_pyr_pyr_AMPA_syn[29], 0.0, 0.0, 1.0))")
# Connection 30: 44, seg 0 (0.953782) -> 4, seg 3 (0.702730)
h("a_pyramidals_48[4].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[30] = new AMPA_syn(0.702730)")
h("a_pyramidals_48[44].soma a_pyramidals_48[4].synlist.append(new NetCon(&v(0.95378166), syn_SmallNet_pyr_pyr_AMPA_syn[30], 0.0, 0.0, 1.0))")
# Connection 31: 38, seg 0 (0.604219) -> 4, seg 3 (0.451400)
h("a_pyramidals_48[4].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[31] = new AMPA_syn(0.451400)")
h("a_pyramidals_48[38].soma a_pyramidals_48[4].synlist.append(new NetCon(&v(0.6042189), syn_SmallNet_pyr_pyr_AMPA_syn[31], 0.0, 0.0, 1.0))")
# Connection 32: 7, seg 0 (0.604963) -> 4, seg 3 (0.897619)
h("a_pyramidals_48[4].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[32] = new AMPA_syn(0.897619)")
h("a_pyramidals_48[7].soma a_pyramidals_48[4].synlist.append(new NetCon(&v(0.60496265), syn_SmallNet_pyr_pyr_AMPA_syn[32], 0.0, 0.0, 1.0))")
# Connection 33: 10, seg 0 (0.655968) -> 4, seg 3 (0.471823)
h("a_pyramidals_48[4].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[33] = new AMPA_syn(0.471823)")
h("a_pyramidals_48[10].soma a_pyramidals_48[4].synlist.append(new NetCon(&v(0.6559678), syn_SmallNet_pyr_pyr_AMPA_syn[33], 0.0, 0.0, 1.0))")
# Connection 34: 30, seg 0 (0.099235) -> 4, seg 3 (0.337417)
h("a_pyramidals_48[4].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[34] = new AMPA_syn(0.337417)")
h("a_pyramidals_48[30].soma a_pyramidals_48[4].synlist.append(new NetCon(&v(0.09923452), syn_SmallNet_pyr_pyr_AMPA_syn[34], 0.0, 0.0, 1.0))")
# Connection 35: 14, seg 0 (0.063664) -> 5, seg 3 (0.289841)
h("a_pyramidals_48[5].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[35] = new AMPA_syn(0.289841)")
h("a_pyramidals_48[14].soma a_pyramidals_48[5].synlist.append(new NetCon(&v(0.06366402), syn_SmallNet_pyr_pyr_AMPA_syn[35], 0.0, 0.0, 1.0))")
# Connection 36: 46, seg 0 (0.860402) -> 5, seg 3 (0.077052)
h("a_pyramidals_48[5].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[36] = new AMPA_syn(0.077052)")
h("a_pyramidals_48[46].soma a_pyramidals_48[5].synlist.append(new NetCon(&v(0.8604015), syn_SmallNet_pyr_pyr_AMPA_syn[36], 0.0, 0.0, 1.0))")
# Connection 37: 17, seg 0 (0.039474) -> 5, seg 3 (0.909170)
h("a_pyramidals_48[5].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[37] = new AMPA_syn(0.909170)")
h("a_pyramidals_48[17].soma a_pyramidals_48[5].synlist.append(new NetCon(&v(0.039473712), syn_SmallNet_pyr_pyr_AMPA_syn[37], 0.0, 0.0, 1.0))")
# Connection 38: 16, seg 0 (0.320561) -> 5, seg 3 (0.541059)
h("a_pyramidals_48[5].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[38] = new AMPA_syn(0.541059)")
h("a_pyramidals_48[16].soma a_pyramidals_48[5].synlist.append(new NetCon(&v(0.32056087), syn_SmallNet_pyr_pyr_AMPA_syn[38], 0.0, 0.0, 1.0))")
# Connection 39: 18, seg 0 (0.397935) -> 5, seg 3 (0.427017)
h("a_pyramidals_48[5].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[39] = new AMPA_syn(0.427017)")
h("a_pyramidals_48[18].soma a_pyramidals_48[5].synlist.append(new NetCon(&v(0.39793468), syn_SmallNet_pyr_pyr_AMPA_syn[39], 0.0, 0.0, 1.0))")
# Connection 40: 43, seg 0 (0.901494) -> 5, seg 3 (0.139890)
h("a_pyramidals_48[5].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[40] = new AMPA_syn(0.139890)")
h("a_pyramidals_48[43].soma a_pyramidals_48[5].synlist.append(new NetCon(&v(0.901494), syn_SmallNet_pyr_pyr_AMPA_syn[40], 0.0, 0.0, 1.0))")
# Connection 41: 33, seg 0 (0.239671) -> 5, seg 3 (0.362978)
h("a_pyramidals_48[5].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[41] = new AMPA_syn(0.362978)")
h("a_pyramidals_48[33].soma a_pyramidals_48[5].synlist.append(new NetCon(&v(0.23967129), syn_SmallNet_pyr_pyr_AMPA_syn[41], 0.0, 0.0, 1.0))")
# Connection 42: 39, seg 0 (0.155633) -> 6, seg 3 (0.596624)
h("a_pyramidals_48[6].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[42] = new AMPA_syn(0.596624)")
h("a_pyramidals_48[39].soma a_pyramidals_48[6].synlist.append(new NetCon(&v(0.15563256), syn_SmallNet_pyr_pyr_AMPA_syn[42], 0.0, 0.0, 1.0))")
# Connection 43: 31, seg 0 (0.968904) -> 6, seg 3 (0.003431)
h("a_pyramidals_48[6].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[43] = new AMPA_syn(0.003431)")
h("a_pyramidals_48[31].soma a_pyramidals_48[6].synlist.append(new NetCon(&v(0.96890426), syn_SmallNet_pyr_pyr_AMPA_syn[43], 0.0, 0.0, 1.0))")
# Connection 44: 22, seg 0 (0.219882) -> 6, seg 3 (0.645831)
h("a_pyramidals_48[6].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[44] = new AMPA_syn(0.645831)")
h("a_pyramidals_48[22].soma a_pyramidals_48[6].synlist.append(new NetCon(&v(0.21988231), syn_SmallNet_pyr_pyr_AMPA_syn[44], 0.0, 0.0, 1.0))")
# Connection 45: 19, seg 0 (0.727325) -> 6, seg 3 (0.279466)
h("a_pyramidals_48[6].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[45] = new AMPA_syn(0.279466)")
h("a_pyramidals_48[19].soma a_pyramidals_48[6].synlist.append(new NetCon(&v(0.72732455), syn_SmallNet_pyr_pyr_AMPA_syn[45], 0.0, 0.0, 1.0))")
# Connection 46: 12, seg 0 (0.696227) -> 6, seg 3 (0.686294)
h("a_pyramidals_48[6].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[46] = new AMPA_syn(0.686294)")
h("a_pyramidals_48[12].soma a_pyramidals_48[6].synlist.append(new NetCon(&v(0.69622713), syn_SmallNet_pyr_pyr_AMPA_syn[46], 0.0, 0.0, 1.0))")
# Connection 47: 10, seg 0 (0.944665) -> 6, seg 3 (0.643455)
h("a_pyramidals_48[6].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[47] = new AMPA_syn(0.643455)")
h("a_pyramidals_48[10].soma a_pyramidals_48[6].synlist.append(new NetCon(&v(0.9446648), syn_SmallNet_pyr_pyr_AMPA_syn[47], 0.0, 0.0, 1.0))")
# Connection 48: 34, seg 0 (0.228431) -> 6, seg 3 (0.029010)
h("a_pyramidals_48[6].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[48] = new AMPA_syn(0.029010)")
h("a_pyramidals_48[34].soma a_pyramidals_48[6].synlist.append(new NetCon(&v(0.22843117), syn_SmallNet_pyr_pyr_AMPA_syn[48], 0.0, 0.0, 1.0))")
# Connection 49: 27, seg 0 (0.779811) -> 7, seg 3 (0.713100)
h("a_pyramidals_48[7].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[49] = new AMPA_syn(0.713100)")
h("a_pyramidals_48[27].soma a_pyramidals_48[7].synlist.append(new NetCon(&v(0.77981144), syn_SmallNet_pyr_pyr_AMPA_syn[49], 0.0, 0.0, 1.0))")
# Connection 50: 44, seg 0 (0.264525) -> 7, seg 3 (0.913014)
h("a_pyramidals_48[7].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[50] = new AMPA_syn(0.913014)")
h("a_pyramidals_48[44].soma a_pyramidals_48[7].synlist.append(new NetCon(&v(0.2645247), syn_SmallNet_pyr_pyr_AMPA_syn[50], 0.0, 0.0, 1.0))")
# Connection 51: 13, seg 0 (0.712923) -> 7, seg 3 (0.555560)
h("a_pyramidals_48[7].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[51] = new AMPA_syn(0.555560)")
h("a_pyramidals_48[13].soma a_pyramidals_48[7].synlist.append(new NetCon(&v(0.7129232), syn_SmallNet_pyr_pyr_AMPA_syn[51], 0.0, 0.0, 1.0))")
# Connection 52: 23, seg 0 (0.992714) -> 7, seg 3 (0.840353)
h("a_pyramidals_48[7].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[52] = new AMPA_syn(0.840353)")
h("a_pyramidals_48[23].soma a_pyramidals_48[7].synlist.append(new NetCon(&v(0.9927141), syn_SmallNet_pyr_pyr_AMPA_syn[52], 0.0, 0.0, 1.0))")
# Connection 53: 33, seg 0 (0.867424) -> 7, seg 3 (0.029054)
h("a_pyramidals_48[7].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[53] = new AMPA_syn(0.029054)")
h("a_pyramidals_48[33].soma a_pyramidals_48[7].synlist.append(new NetCon(&v(0.8674238), syn_SmallNet_pyr_pyr_AMPA_syn[53], 0.0, 0.0, 1.0))")
# Connection 54: 12, seg 0 (0.341969) -> 7, seg 3 (0.307221)
h("a_pyramidals_48[7].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[54] = new AMPA_syn(0.307221)")
h("a_pyramidals_48[12].soma a_pyramidals_48[7].synlist.append(new NetCon(&v(0.34196883), syn_SmallNet_pyr_pyr_AMPA_syn[54], 0.0, 0.0, 1.0))")
# Connection 55: 25, seg 0 (0.620140) -> 7, seg 3 (0.716537)
h("a_pyramidals_48[7].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[55] = new AMPA_syn(0.716537)")
h("a_pyramidals_48[25].soma a_pyramidals_48[7].synlist.append(new NetCon(&v(0.6201397), syn_SmallNet_pyr_pyr_AMPA_syn[55], 0.0, 0.0, 1.0))")
# Connection 56: 21, seg 0 (0.301162) -> 8, seg 3 (0.202390)
h("a_pyramidals_48[8].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[56] = new AMPA_syn(0.202390)")
h("a_pyramidals_48[21].soma a_pyramidals_48[8].synlist.append(new NetCon(&v(0.30116248), syn_SmallNet_pyr_pyr_AMPA_syn[56], 0.0, 0.0, 1.0))")
# Connection 57: 31, seg 0 (0.593431) -> 8, seg 3 (0.764722)
h("a_pyramidals_48[8].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[57] = new AMPA_syn(0.764722)")
h("a_pyramidals_48[31].soma a_pyramidals_48[8].synlist.append(new NetCon(&v(0.59343064), syn_SmallNet_pyr_pyr_AMPA_syn[57], 0.0, 0.0, 1.0))")
# Connection 58: 45, seg 0 (0.901399) -> 8, seg 3 (0.462491)
h("a_pyramidals_48[8].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[58] = new AMPA_syn(0.462491)")
h("a_pyramidals_48[45].soma a_pyramidals_48[8].synlist.append(new NetCon(&v(0.9013986), syn_SmallNet_pyr_pyr_AMPA_syn[58], 0.0, 0.0, 1.0))")
# Connection 59: 34, seg 0 (0.041602) -> 8, seg 3 (0.788847)
h("a_pyramidals_48[8].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[59] = new AMPA_syn(0.788847)")
h("a_pyramidals_48[34].soma a_pyramidals_48[8].synlist.append(new NetCon(&v(0.041602254), syn_SmallNet_pyr_pyr_AMPA_syn[59], 0.0, 0.0, 1.0))")
# Connection 60: 23, seg 0 (0.735396) -> 8, seg 3 (0.290944)
h("a_pyramidals_48[8].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[60] = new AMPA_syn(0.290944)")
h("a_pyramidals_48[23].soma a_pyramidals_48[8].synlist.append(new NetCon(&v(0.7353959), syn_SmallNet_pyr_pyr_AMPA_syn[60], 0.0, 0.0, 1.0))")
# Connection 61: 37, seg 0 (0.878065) -> 8, seg 3 (0.486583)
h("a_pyramidals_48[8].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[61] = new AMPA_syn(0.486583)")
h("a_pyramidals_48[37].soma a_pyramidals_48[8].synlist.append(new NetCon(&v(0.8780652), syn_SmallNet_pyr_pyr_AMPA_syn[61], 0.0, 0.0, 1.0))")
# Connection 62: 41, seg 0 (0.258455) -> 8, seg 3 (0.211914)
h("a_pyramidals_48[8].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[62] = new AMPA_syn(0.211914)")
h("a_pyramidals_48[41].soma a_pyramidals_48[8].synlist.append(new NetCon(&v(0.2584551), syn_SmallNet_pyr_pyr_AMPA_syn[62], 0.0, 0.0, 1.0))")
# Connection 63: 15, seg 0 (0.088060) -> 9, seg 3 (0.966129)
h("a_pyramidals_48[9].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[63] = new AMPA_syn(0.966129)")
h("a_pyramidals_48[15].soma a_pyramidals_48[9].synlist.append(new NetCon(&v(0.0880599), syn_SmallNet_pyr_pyr_AMPA_syn[63], 0.0, 0.0, 1.0))")
# Connection 64: 7, seg 0 (0.411607) -> 9, seg 3 (0.412017)
h("a_pyramidals_48[9].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[64] = new AMPA_syn(0.412017)")
h("a_pyramidals_48[7].soma a_pyramidals_48[9].synlist.append(new NetCon(&v(0.41160697), syn_SmallNet_pyr_pyr_AMPA_syn[64], 0.0, 0.0, 1.0))")
# Connection 65: 28, seg 0 (0.830993) -> 9, seg 3 (0.077901)
h("a_pyramidals_48[9].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[65] = new AMPA_syn(0.077901)")
h("a_pyramidals_48[28].soma a_pyramidals_48[9].synlist.append(new NetCon(&v(0.8309927), syn_SmallNet_pyr_pyr_AMPA_syn[65], 0.0, 0.0, 1.0))")
# Connection 66: 47, seg 0 (0.630264) -> 9, seg 3 (0.458238)
h("a_pyramidals_48[9].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[66] = new AMPA_syn(0.458238)")
h("a_pyramidals_48[47].soma a_pyramidals_48[9].synlist.append(new NetCon(&v(0.63026434), syn_SmallNet_pyr_pyr_AMPA_syn[66], 0.0, 0.0, 1.0))")
# Connection 67: 38, seg 0 (0.018954) -> 9, seg 3 (0.832208)
h("a_pyramidals_48[9].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[67] = new AMPA_syn(0.832208)")
h("a_pyramidals_48[38].soma a_pyramidals_48[9].synlist.append(new NetCon(&v(0.018953562), syn_SmallNet_pyr_pyr_AMPA_syn[67], 0.0, 0.0, 1.0))")
# Connection 68: 39, seg 0 (0.054486) -> 9, seg 3 (0.458807)
h("a_pyramidals_48[9].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[68] = new AMPA_syn(0.458807)")
h("a_pyramidals_48[39].soma a_pyramidals_48[9].synlist.append(new NetCon(&v(0.054486394), syn_SmallNet_pyr_pyr_AMPA_syn[68], 0.0, 0.0, 1.0))")
# Connection 69: 29, seg 0 (0.755347) -> 9, seg 3 (0.604897)
h("a_pyramidals_48[9].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[69] = new AMPA_syn(0.604897)")
h("a_pyramidals_48[29].soma a_pyramidals_48[9].synlist.append(new NetCon(&v(0.7553471), syn_SmallNet_pyr_pyr_AMPA_syn[69], 0.0, 0.0, 1.0))")
# Connection 70: 33, seg 0 (0.283876) -> 10, seg 3 (0.829073)
h("a_pyramidals_48[10].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[70] = new AMPA_syn(0.829073)")
h("a_pyramidals_48[33].soma a_pyramidals_48[10].synlist.append(new NetCon(&v(0.28387606), syn_SmallNet_pyr_pyr_AMPA_syn[70], 0.0, 0.0, 1.0))")
# Connection 71: 44, seg 0 (0.835565) -> 10, seg 3 (0.558523)
h("a_pyramidals_48[10].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[71] = new AMPA_syn(0.558523)")
h("a_pyramidals_48[44].soma a_pyramidals_48[10].synlist.append(new NetCon(&v(0.8355652), syn_SmallNet_pyr_pyr_AMPA_syn[71], 0.0, 0.0, 1.0))")
# Connection 72: 42, seg 0 (0.332131) -> 10, seg 3 (0.559424)
h("a_pyramidals_48[10].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[72] = new AMPA_syn(0.559424)")
h("a_pyramidals_48[42].soma a_pyramidals_48[10].synlist.append(new NetCon(&v(0.3321312), syn_SmallNet_pyr_pyr_AMPA_syn[72], 0.0, 0.0, 1.0))")
# Connection 73: 17, seg 0 (0.896078) -> 10, seg 3 (0.730510)
h("a_pyramidals_48[10].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[73] = new AMPA_syn(0.730510)")
h("a_pyramidals_48[17].soma a_pyramidals_48[10].synlist.append(new NetCon(&v(0.8960775), syn_SmallNet_pyr_pyr_AMPA_syn[73], 0.0, 0.0, 1.0))")
# Connection 74: 7, seg 0 (0.141894) -> 10, seg 3 (0.237565)
h("a_pyramidals_48[10].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[74] = new AMPA_syn(0.237565)")
h("a_pyramidals_48[7].soma a_pyramidals_48[10].synlist.append(new NetCon(&v(0.14189446), syn_SmallNet_pyr_pyr_AMPA_syn[74], 0.0, 0.0, 1.0))")
# Connection 75: 21, seg 0 (0.254696) -> 10, seg 3 (0.121866)
h("a_pyramidals_48[10].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[75] = new AMPA_syn(0.121866)")
h("a_pyramidals_48[21].soma a_pyramidals_48[10].synlist.append(new NetCon(&v(0.2546957), syn_SmallNet_pyr_pyr_AMPA_syn[75], 0.0, 0.0, 1.0))")
# Connection 76: 39, seg 0 (0.495750) -> 10, seg 3 (0.866411)
h("a_pyramidals_48[10].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[76] = new AMPA_syn(0.866411)")
h("a_pyramidals_48[39].soma a_pyramidals_48[10].synlist.append(new NetCon(&v(0.49574977), syn_SmallNet_pyr_pyr_AMPA_syn[76], 0.0, 0.0, 1.0))")
# Connection 77: 24, seg 0 (0.634343) -> 11, seg 3 (0.223135)
h("a_pyramidals_48[11].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[77] = new AMPA_syn(0.223135)")
h("a_pyramidals_48[24].soma a_pyramidals_48[11].synlist.append(new NetCon(&v(0.6343433), syn_SmallNet_pyr_pyr_AMPA_syn[77], 0.0, 0.0, 1.0))")
# Connection 78: 30, seg 0 (0.205772) -> 11, seg 3 (0.896021)
h("a_pyramidals_48[11].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[78] = new AMPA_syn(0.896021)")
h("a_pyramidals_48[30].soma a_pyramidals_48[11].synlist.append(new NetCon(&v(0.20577228), syn_SmallNet_pyr_pyr_AMPA_syn[78], 0.0, 0.0, 1.0))")
# Connection 79: 4, seg 0 (0.052956) -> 11, seg 3 (0.522092)
h("a_pyramidals_48[11].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[79] = new AMPA_syn(0.522092)")
h("a_pyramidals_48[4].soma a_pyramidals_48[11].synlist.append(new NetCon(&v(0.052955866), syn_SmallNet_pyr_pyr_AMPA_syn[79], 0.0, 0.0, 1.0))")
# Connection 80: 7, seg 0 (0.047059) -> 11, seg 3 (0.015000)
h("a_pyramidals_48[11].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[80] = new AMPA_syn(0.015000)")
h("a_pyramidals_48[7].soma a_pyramidals_48[11].synlist.append(new NetCon(&v(0.047059357), syn_SmallNet_pyr_pyr_AMPA_syn[80], 0.0, 0.0, 1.0))")
# Connection 81: 38, seg 0 (0.723724) -> 11, seg 3 (0.523893)
h("a_pyramidals_48[11].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[81] = new AMPA_syn(0.523893)")
h("a_pyramidals_48[38].soma a_pyramidals_48[11].synlist.append(new NetCon(&v(0.72372425), syn_SmallNet_pyr_pyr_AMPA_syn[81], 0.0, 0.0, 1.0))")
# Connection 82: 29, seg 0 (0.291625) -> 11, seg 3 (0.590419)
h("a_pyramidals_48[11].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[82] = new AMPA_syn(0.590419)")
h("a_pyramidals_48[29].soma a_pyramidals_48[11].synlist.append(new NetCon(&v(0.29162478), syn_SmallNet_pyr_pyr_AMPA_syn[82], 0.0, 0.0, 1.0))")
# Connection 83: 12, seg 0 (0.782290) -> 11, seg 3 (0.640835)
h("a_pyramidals_48[11].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[83] = new AMPA_syn(0.640835)")
h("a_pyramidals_48[12].soma a_pyramidals_48[11].synlist.append(new NetCon(&v(0.78229034), syn_SmallNet_pyr_pyr_AMPA_syn[83], 0.0, 0.0, 1.0))")
# Connection 84: 3, seg 0 (0.134557) -> 12, seg 3 (0.230680)
h("a_pyramidals_48[12].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[84] = new AMPA_syn(0.230680)")
h("a_pyramidals_48[3].soma a_pyramidals_48[12].synlist.append(new NetCon(&v(0.13455677), syn_SmallNet_pyr_pyr_AMPA_syn[84], 0.0, 0.0, 1.0))")
# Connection 85: 13, seg 0 (0.169980) -> 12, seg 3 (0.348496)
h("a_pyramidals_48[12].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[85] = new AMPA_syn(0.348496)")
h("a_pyramidals_48[13].soma a_pyramidals_48[12].synlist.append(new NetCon(&v(0.16997999), syn_SmallNet_pyr_pyr_AMPA_syn[85], 0.0, 0.0, 1.0))")
# Connection 86: 24, seg 0 (0.495295) -> 12, seg 3 (0.718436)
h("a_pyramidals_48[12].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[86] = new AMPA_syn(0.718436)")
h("a_pyramidals_48[24].soma a_pyramidals_48[12].synlist.append(new NetCon(&v(0.4952953), syn_SmallNet_pyr_pyr_AMPA_syn[86], 0.0, 0.0, 1.0))")
# Connection 87: 5, seg 0 (0.088338) -> 12, seg 3 (0.761792)
h("a_pyramidals_48[12].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[87] = new AMPA_syn(0.761792)")
h("a_pyramidals_48[5].soma a_pyramidals_48[12].synlist.append(new NetCon(&v(0.08833754), syn_SmallNet_pyr_pyr_AMPA_syn[87], 0.0, 0.0, 1.0))")
# Connection 88: 4, seg 0 (0.973610) -> 12, seg 3 (0.056676)
h("a_pyramidals_48[12].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[88] = new AMPA_syn(0.056676)")
h("a_pyramidals_48[4].soma a_pyramidals_48[12].synlist.append(new NetCon(&v(0.97361004), syn_SmallNet_pyr_pyr_AMPA_syn[88], 0.0, 0.0, 1.0))")
# Connection 89: 46, seg 0 (0.900976) -> 12, seg 3 (0.244782)
h("a_pyramidals_48[12].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[89] = new AMPA_syn(0.244782)")
h("a_pyramidals_48[46].soma a_pyramidals_48[12].synlist.append(new NetCon(&v(0.90097606), syn_SmallNet_pyr_pyr_AMPA_syn[89], 0.0, 0.0, 1.0))")
# Connection 90: 33, seg 0 (0.694393) -> 12, seg 3 (0.540938)
h("a_pyramidals_48[12].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[90] = new AMPA_syn(0.540938)")
h("a_pyramidals_48[33].soma a_pyramidals_48[12].synlist.append(new NetCon(&v(0.69439274), syn_SmallNet_pyr_pyr_AMPA_syn[90], 0.0, 0.0, 1.0))")
# Connection 91: 22, seg 0 (0.997574) -> 13, seg 3 (0.398308)
h("a_pyramidals_48[13].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[91] = new AMPA_syn(0.398308)")
h("a_pyramidals_48[22].soma a_pyramidals_48[13].synlist.append(new NetCon(&v(0.9975737), syn_SmallNet_pyr_pyr_AMPA_syn[91], 0.0, 0.0, 1.0))")
# Connection 92: 25, seg 0 (0.950044) -> 13, seg 3 (0.446576)
h("a_pyramidals_48[13].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[92] = new AMPA_syn(0.446576)")
h("a_pyramidals_48[25].soma a_pyramidals_48[13].synlist.append(new NetCon(&v(0.95004386), syn_SmallNet_pyr_pyr_AMPA_syn[92], 0.0, 0.0, 1.0))")
# Connection 93: 0, seg 0 (0.922665) -> 13, seg 3 (0.839789)
h("a_pyramidals_48[13].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[93] = new AMPA_syn(0.839789)")
h("a_pyramidals_48[0].soma a_pyramidals_48[13].synlist.append(new NetCon(&v(0.92266464), syn_SmallNet_pyr_pyr_AMPA_syn[93], 0.0, 0.0, 1.0))")
# Connection 94: 28, seg 0 (0.375572) -> 13, seg 3 (0.923855)
h("a_pyramidals_48[13].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[94] = new AMPA_syn(0.923855)")
h("a_pyramidals_48[28].soma a_pyramidals_48[13].synlist.append(new NetCon(&v(0.3755722), syn_SmallNet_pyr_pyr_AMPA_syn[94], 0.0, 0.0, 1.0))")
# Connection 95: 23, seg 0 (0.154839) -> 13, seg 3 (0.231490)
h("a_pyramidals_48[13].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[95] = new AMPA_syn(0.231490)")
h("a_pyramidals_48[23].soma a_pyramidals_48[13].synlist.append(new NetCon(&v(0.1548391), syn_SmallNet_pyr_pyr_AMPA_syn[95], 0.0, 0.0, 1.0))")
# Connection 96: 5, seg 0 (0.046844) -> 13, seg 3 (0.588852)
h("a_pyramidals_48[13].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[96] = new AMPA_syn(0.588852)")
h("a_pyramidals_48[5].soma a_pyramidals_48[13].synlist.append(new NetCon(&v(0.046844125), syn_SmallNet_pyr_pyr_AMPA_syn[96], 0.0, 0.0, 1.0))")
# Connection 97: 14, seg 0 (0.495157) -> 13, seg 3 (0.064138)
h("a_pyramidals_48[13].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[97] = new AMPA_syn(0.064138)")
h("a_pyramidals_48[14].soma a_pyramidals_48[13].synlist.append(new NetCon(&v(0.49515736), syn_SmallNet_pyr_pyr_AMPA_syn[97], 0.0, 0.0, 1.0))")
# Connection 98: 43, seg 0 (0.603047) -> 14, seg 3 (0.476037)
h("a_pyramidals_48[14].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[98] = new AMPA_syn(0.476037)")
h("a_pyramidals_48[43].soma a_pyramidals_48[14].synlist.append(new NetCon(&v(0.6030469), syn_SmallNet_pyr_pyr_AMPA_syn[98], 0.0, 0.0, 1.0))")
# Connection 99: 46, seg 0 (0.168351) -> 14, seg 3 (0.306206)
h("a_pyramidals_48[14].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[99] = new AMPA_syn(0.306206)")
h("a_pyramidals_48[46].soma a_pyramidals_48[14].synlist.append(new NetCon(&v(0.16835135), syn_SmallNet_pyr_pyr_AMPA_syn[99], 0.0, 0.0, 1.0))")
# Connection 100: 2, seg 0 (0.780401) -> 14, seg 3 (0.850312)
h("a_pyramidals_48[14].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[100] = new AMPA_syn(0.850312)")
h("a_pyramidals_48[2].soma a_pyramidals_48[14].synlist.append(new NetCon(&v(0.7804006), syn_SmallNet_pyr_pyr_AMPA_syn[100], 0.0, 0.0, 1.0))")
# Connection 101: 19, seg 0 (0.173829) -> 14, seg 3 (0.224782)
h("a_pyramidals_48[14].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[101] = new AMPA_syn(0.224782)")
h("a_pyramidals_48[19].soma a_pyramidals_48[14].synlist.append(new NetCon(&v(0.1738295), syn_SmallNet_pyr_pyr_AMPA_syn[101], 0.0, 0.0, 1.0))")
# Connection 102: 20, seg 0 (0.285948) -> 14, seg 3 (0.706104)
h("a_pyramidals_48[14].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[102] = new AMPA_syn(0.706104)")
h("a_pyramidals_48[20].soma a_pyramidals_48[14].synlist.append(new NetCon(&v(0.28594762), syn_SmallNet_pyr_pyr_AMPA_syn[102], 0.0, 0.0, 1.0))")
# Connection 103: 32, seg 0 (0.373093) -> 14, seg 3 (0.886280)
h("a_pyramidals_48[14].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[103] = new AMPA_syn(0.886280)")
h("a_pyramidals_48[32].soma a_pyramidals_48[14].synlist.append(new NetCon(&v(0.37309253), syn_SmallNet_pyr_pyr_AMPA_syn[103], 0.0, 0.0, 1.0))")
# Connection 104: 30, seg 0 (0.549137) -> 14, seg 3 (0.750137)
# Connections 104-224: pyramidal -> pyramidal AMPA synapses.
# Each record is (syn_index, src_cell, src_x, tgt_cell, tgt_x):
#   syn_index - index into the hoc array syn_SmallNet_pyr_pyr_AMPA_syn
#   src_cell  - presynaptic a_pyramidals_48 index; the NetCon watches v(src_x)
#               on that cell's soma section
#   tgt_cell  - postsynaptic a_pyramidals_48 index; the AMPA_syn is placed at
#               tgt_x on that cell's apical3 section and the NetCon is stored
#               on that cell's synlist
# The x positions are kept as strings so the hoc commands built below are
# byte-identical to the previously inlined h(...) literals (their decimal
# precision varies from entry to entry and must not be re-formatted).
_PYR_PYR_AMPA_CONNS_104_224 = [
    (104, 30, "0.5491369", 14, "0.750137"),
    (105, 7, "0.4191125", 15, "0.733941"),
    (106, 32, "0.58095133", 15, "0.539659"),
    (107, 24, "0.42135978", 15, "0.242393"),
    (108, 30, "0.766717", 15, "0.647923"),
    (109, 46, "0.048083305", 15, "0.032695"),
    (110, 47, "0.09620392", 15, "0.092735"),
    (111, 20, "0.85007226", 15, "0.610910"),
    (112, 20, "0.7661156", 16, "0.494481"),
    (113, 27, "0.5061425", 16, "0.562041"),
    (114, 39, "0.5925108", 16, "0.548074"),
    (115, 5, "0.3324874", 16, "0.364947"),
    (116, 10, "0.6285295", 16, "0.825434"),
    (117, 8, "0.110572696", 16, "0.542861"),
    (118, 1, "0.34048945", 16, "0.836299"),
    (119, 41, "0.2621795", 17, "0.428280"),
    (120, 35, "0.8794126", 17, "0.178331"),
    (121, 13, "0.42001498", 17, "0.025886"),
    (122, 36, "0.52736956", 17, "0.776999"),
    (123, 24, "0.441678", 17, "0.228215"),
    (124, 44, "0.024095297", 17, "0.273597"),
    (125, 47, "0.028006732", 17, "0.006848"),
    (126, 38, "0.29969978", 18, "0.372168"),
    (127, 33, "0.11687273", 18, "0.287478"),
    (128, 27, "0.2540329", 18, "0.840261"),
    (129, 24, "0.61816716", 18, "0.049727"),
    (130, 19, "0.12760913", 18, "0.794195"),
    (131, 43, "0.37439346", 18, "0.000139"),
    (132, 6, "0.63981646", 18, "0.268771"),
    (133, 23, "0.005173862", 19, "0.034635"),
    (134, 14, "0.5848943", 19, "0.393950"),
    (135, 13, "0.54666543", 19, "0.564942"),
    (136, 37, "0.8076822", 19, "0.939441"),
    (137, 39, "0.50647545", 19, "0.027469"),
    (138, 26, "0.7503214", 19, "0.015701"),
    (139, 41, "0.6934166", 19, "0.094285"),
    (140, 3, "0.6396372", 20, "0.843327"),
    (141, 1, "0.3613369", 20, "0.209566"),
    (142, 22, "0.5333501", 20, "0.864069"),
    (143, 11, "0.68682986", 20, "0.038544"),
    (144, 10, "0.99197716", 20, "0.365912"),
    (145, 41, "0.73524904", 20, "0.067976"),
    (146, 19, "0.98880696", 20, "0.969058"),
    (147, 16, "0.9437193", 21, "0.817841"),
    (148, 7, "0.27956283", 21, "0.395980"),
    (149, 15, "0.565799", 21, "0.826177"),
    (150, 4, "0.53245443", 21, "0.962771"),
    (151, 45, "0.57610905", 21, "0.016917"),
    (152, 28, "0.79771906", 21, "0.034294"),
    (153, 24, "0.97101265", 21, "0.419808"),
    (154, 24, "0.06179136", 22, "0.679744"),
    (155, 44, "0.22706777", 22, "0.370027"),
    (156, 43, "0.48288703", 22, "0.278206"),
    (157, 12, "0.57224643", 22, "0.480232"),
    (158, 40, "0.7941523", 22, "0.138994"),
    (159, 14, "0.439893", 22, "0.172102"),
    (160, 4, "0.9439641", 22, "0.178789"),
    (161, 7, "0.56709516", 23, "0.232057"),
    (162, 35, "0.33138764", 23, "0.553664"),
    (163, 29, "0.9188287", 23, "0.341060"),
    (164, 34, "0.2072047", 23, "0.823411"),
    (165, 17, "0.86271983", 23, "0.689691"),
    (166, 9, "0.76709586", 23, "0.082629"),
    (167, 43, "0.65841", 23, "0.318795"),
    (168, 35, "0.23172057", 24, "0.018219"),
    (169, 6, "0.25927687", 24, "0.577913"),
    (170, 12, "0.6644516", 24, "0.818109"),
    (171, 13, "0.3461601", 24, "0.202842"),
    (172, 18, "0.20498711", 24, "0.422400"),
    (173, 42, "0.6967586", 24, "0.983625"),
    (174, 31, "0.37398994", 24, "0.782592"),
    (175, 42, "0.2690606", 25, "0.292623"),
    (176, 20, "0.13333648", 25, "0.524121"),
    (177, 35, "0.7012425", 25, "0.598637"),
    (178, 11, "0.7617659", 25, "0.680388"),
    (179, 32, "0.058970213", 25, "0.790828"),
    (180, 38, "0.37346816", 25, "0.528930"),
    (181, 13, "0.49139154", 25, "0.897832"),
    (182, 38, "0.03978145", 26, "0.970161"),
    (183, 40, "0.073177814", 26, "0.559737"),
    (184, 16, "0.8655008", 26, "0.750894"),
    (185, 4, "0.9542993", 26, "0.694432"),
    (186, 22, "0.24870765", 26, "0.631652"),
    (187, 43, "0.49208057", 26, "0.940473"),
    (188, 30, "0.83289415", 26, "0.077230"),
    (189, 4, "0.40491122", 27, "0.142955"),
    (190, 39, "0.40388662", 27, "0.650573"),
    (191, 43, "0.9083249", 27, "0.236300"),
    (192, 26, "0.95011866", 27, "0.279205"),
    (193, 1, "0.98006374", 27, "0.247887"),
    (194, 23, "0.65146375", 27, "0.985644"),
    (195, 3, "0.67496234", 27, "0.877470"),
    (196, 13, "0.26319993", 28, "0.613082"),
    (197, 43, "0.11161822", 28, "0.488109"),
    (198, 21, "0.43082654", 28, "0.646760"),
    (199, 24, "0.838099", 28, "0.149640"),
    (200, 17, "0.013142705", 28, "0.032417"),
    (201, 31, "0.8489629", 28, "0.365775"),
    (202, 29, "0.0695405", 28, "0.703640"),
    (203, 38, "0.7909575", 29, "0.274451"),
    (204, 30, "0.40224653", 29, "0.215297"),
    (205, 39, "0.45371085", 29, "0.397554"),
    (206, 26, "0.97124165", 29, "0.941880"),
    (207, 6, "0.13953072", 29, "0.800108"),
    (208, 14, "0.5798692", 29, "0.666334"),
    (209, 17, "0.9612844", 29, "0.567462"),
    (210, 14, "0.24495178", 30, "0.541739"),
    (211, 7, "0.6694618", 30, "0.978518"),
    (212, 39, "0.07256037", 30, "0.055781"),
    (213, 37, "0.40430617", 30, "0.869321"),
    (214, 24, "0.04597789", 30, "0.191369"),
    (215, 11, "0.9000326", 30, "0.609100"),
    (216, 29, "0.5403847", 30, "0.170134"),
    (217, 11, "0.7004354", 31, "0.487738"),
    (218, 10, "0.04501629", 31, "0.163752"),
    (219, 42, "0.3801359", 31, "0.139048"),
    (220, 40, "0.8176125", 31, "0.720032"),
    (221, 3, "0.907134", 31, "0.662754"),
    (222, 13, "0.1966291", 31, "0.982022"),
    (223, 12, "0.30248433", 31, "0.688111"),
    (224, 18, "0.89066255", 32, "0.216336"),
]
for _idx, _src, _src_x, _tgt, _tgt_x in _PYR_PYR_AMPA_CONNS_104_224:
    # Create the AMPA point process at tgt_x on the target cell's apical3.
    h("a_pyramidals_48[%d].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[%d] = new AMPA_syn(%s)"
      % (_tgt, _idx, _tgt_x))
    # Wire the source soma's voltage at src_x into that synapse via a NetCon
    # (threshold 0.0, delay 0.0, weight 1.0), appended to the target's synlist.
    h("a_pyramidals_48[%d].soma a_pyramidals_48[%d].synlist.append(new NetCon(&v(%s), syn_SmallNet_pyr_pyr_AMPA_syn[%d], 0.0, 0.0, 1.0))"
      % (_src, _tgt, _src_x, _idx))
# Connection 225: 40, seg 0 (0.354035) -> 32, seg 3 (0.674653)
h("a_pyramidals_48[32].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[225] = new AMPA_syn(0.674653)")
h("a_pyramidals_48[40].soma a_pyramidals_48[32].synlist.append(new NetCon(&v(0.35403496), syn_SmallNet_pyr_pyr_AMPA_syn[225], 0.0, 0.0, 1.0))")
# Connection 226: 10, seg 0 (0.279386) -> 32, seg 3 (0.112374)
h("a_pyramidals_48[32].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[226] = new AMPA_syn(0.112374)")
h("a_pyramidals_48[10].soma a_pyramidals_48[32].synlist.append(new NetCon(&v(0.2793864), syn_SmallNet_pyr_pyr_AMPA_syn[226], 0.0, 0.0, 1.0))")
# Connection 227: 7, seg 0 (0.050321) -> 32, seg 3 (0.152723)
h("a_pyramidals_48[32].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[227] = new AMPA_syn(0.152723)")
h("a_pyramidals_48[7].soma a_pyramidals_48[32].synlist.append(new NetCon(&v(0.050320625), syn_SmallNet_pyr_pyr_AMPA_syn[227], 0.0, 0.0, 1.0))")
# Connection 228: 13, seg 0 (0.209898) -> 32, seg 3 (0.431169)
h("a_pyramidals_48[32].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[228] = new AMPA_syn(0.431169)")
h("a_pyramidals_48[13].soma a_pyramidals_48[32].synlist.append(new NetCon(&v(0.20989805), syn_SmallNet_pyr_pyr_AMPA_syn[228], 0.0, 0.0, 1.0))")
# Connection 229: 37, seg 0 (0.030302) -> 32, seg 3 (0.391791)
h("a_pyramidals_48[32].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[229] = new AMPA_syn(0.391791)")
h("a_pyramidals_48[37].soma a_pyramidals_48[32].synlist.append(new NetCon(&v(0.030301988), syn_SmallNet_pyr_pyr_AMPA_syn[229], 0.0, 0.0, 1.0))")
# Connection 230: 31, seg 0 (0.513005) -> 32, seg 3 (0.928034)
h("a_pyramidals_48[32].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[230] = new AMPA_syn(0.928034)")
h("a_pyramidals_48[31].soma a_pyramidals_48[32].synlist.append(new NetCon(&v(0.5130046), syn_SmallNet_pyr_pyr_AMPA_syn[230], 0.0, 0.0, 1.0))")
# Connection 231: 11, seg 0 (0.938186) -> 33, seg 3 (0.601694)
h("a_pyramidals_48[33].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[231] = new AMPA_syn(0.601694)")
h("a_pyramidals_48[11].soma a_pyramidals_48[33].synlist.append(new NetCon(&v(0.9381859), syn_SmallNet_pyr_pyr_AMPA_syn[231], 0.0, 0.0, 1.0))")
# Connection 232: 43, seg 0 (0.062753) -> 33, seg 3 (0.089192)
h("a_pyramidals_48[33].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[232] = new AMPA_syn(0.089192)")
h("a_pyramidals_48[43].soma a_pyramidals_48[33].synlist.append(new NetCon(&v(0.06275344), syn_SmallNet_pyr_pyr_AMPA_syn[232], 0.0, 0.0, 1.0))")
# Connection 233: 45, seg 0 (0.294484) -> 33, seg 3 (0.205006)
h("a_pyramidals_48[33].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[233] = new AMPA_syn(0.205006)")
h("a_pyramidals_48[45].soma a_pyramidals_48[33].synlist.append(new NetCon(&v(0.29448402), syn_SmallNet_pyr_pyr_AMPA_syn[233], 0.0, 0.0, 1.0))")
# Connection 234: 36, seg 0 (0.503660) -> 33, seg 3 (0.307808)
h("a_pyramidals_48[33].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[234] = new AMPA_syn(0.307808)")
h("a_pyramidals_48[36].soma a_pyramidals_48[33].synlist.append(new NetCon(&v(0.50366026), syn_SmallNet_pyr_pyr_AMPA_syn[234], 0.0, 0.0, 1.0))")
# Connection 235: 46, seg 0 (0.764981) -> 33, seg 3 (0.322033)
h("a_pyramidals_48[33].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[235] = new AMPA_syn(0.322033)")
h("a_pyramidals_48[46].soma a_pyramidals_48[33].synlist.append(new NetCon(&v(0.7649809), syn_SmallNet_pyr_pyr_AMPA_syn[235], 0.0, 0.0, 1.0))")
# Connection 236: 10, seg 0 (0.221482) -> 33, seg 3 (0.853899)
h("a_pyramidals_48[33].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[236] = new AMPA_syn(0.853899)")
h("a_pyramidals_48[10].soma a_pyramidals_48[33].synlist.append(new NetCon(&v(0.22148204), syn_SmallNet_pyr_pyr_AMPA_syn[236], 0.0, 0.0, 1.0))")
# Connection 237: 17, seg 0 (0.166235) -> 33, seg 3 (0.669860)
h("a_pyramidals_48[33].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[237] = new AMPA_syn(0.669860)")
h("a_pyramidals_48[17].soma a_pyramidals_48[33].synlist.append(new NetCon(&v(0.16623509), syn_SmallNet_pyr_pyr_AMPA_syn[237], 0.0, 0.0, 1.0))")
# Connection 238: 40, seg 0 (0.961216) -> 34, seg 3 (0.339603)
h("a_pyramidals_48[34].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[238] = new AMPA_syn(0.339603)")
h("a_pyramidals_48[40].soma a_pyramidals_48[34].synlist.append(new NetCon(&v(0.961216), syn_SmallNet_pyr_pyr_AMPA_syn[238], 0.0, 0.0, 1.0))")
# Connection 239: 2, seg 0 (0.512511) -> 34, seg 3 (0.523748)
h("a_pyramidals_48[34].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[239] = new AMPA_syn(0.523748)")
h("a_pyramidals_48[2].soma a_pyramidals_48[34].synlist.append(new NetCon(&v(0.512511), syn_SmallNet_pyr_pyr_AMPA_syn[239], 0.0, 0.0, 1.0))")
# Connection 240: 8, seg 0 (0.527692) -> 34, seg 3 (0.796003)
h("a_pyramidals_48[34].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[240] = new AMPA_syn(0.796003)")
h("a_pyramidals_48[8].soma a_pyramidals_48[34].synlist.append(new NetCon(&v(0.5276918), syn_SmallNet_pyr_pyr_AMPA_syn[240], 0.0, 0.0, 1.0))")
# Connection 241: 36, seg 0 (0.229382) -> 34, seg 3 (0.223236)
h("a_pyramidals_48[34].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[241] = new AMPA_syn(0.223236)")
h("a_pyramidals_48[36].soma a_pyramidals_48[34].synlist.append(new NetCon(&v(0.22938234), syn_SmallNet_pyr_pyr_AMPA_syn[241], 0.0, 0.0, 1.0))")
# Connection 242: 25, seg 0 (0.590731) -> 34, seg 3 (0.460510)
h("a_pyramidals_48[34].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[242] = new AMPA_syn(0.460510)")
h("a_pyramidals_48[25].soma a_pyramidals_48[34].synlist.append(new NetCon(&v(0.59073055), syn_SmallNet_pyr_pyr_AMPA_syn[242], 0.0, 0.0, 1.0))")
# Connection 243: 22, seg 0 (0.543165) -> 34, seg 3 (0.268678)
h("a_pyramidals_48[34].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[243] = new AMPA_syn(0.268678)")
h("a_pyramidals_48[22].soma a_pyramidals_48[34].synlist.append(new NetCon(&v(0.54316497), syn_SmallNet_pyr_pyr_AMPA_syn[243], 0.0, 0.0, 1.0))")
# Connection 244: 11, seg 0 (0.751767) -> 34, seg 3 (0.498142)
h("a_pyramidals_48[34].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[244] = new AMPA_syn(0.498142)")
h("a_pyramidals_48[11].soma a_pyramidals_48[34].synlist.append(new NetCon(&v(0.7517668), syn_SmallNet_pyr_pyr_AMPA_syn[244], 0.0, 0.0, 1.0))")
# Connection 245: 39, seg 0 (0.870277) -> 35, seg 3 (0.555743)
h("a_pyramidals_48[35].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[245] = new AMPA_syn(0.555743)")
h("a_pyramidals_48[39].soma a_pyramidals_48[35].synlist.append(new NetCon(&v(0.87027705), syn_SmallNet_pyr_pyr_AMPA_syn[245], 0.0, 0.0, 1.0))")
# Connection 246: 32, seg 0 (0.278351) -> 35, seg 3 (0.097604)
h("a_pyramidals_48[35].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[246] = new AMPA_syn(0.097604)")
h("a_pyramidals_48[32].soma a_pyramidals_48[35].synlist.append(new NetCon(&v(0.27835137), syn_SmallNet_pyr_pyr_AMPA_syn[246], 0.0, 0.0, 1.0))")
# Connection 247: 6, seg 0 (0.878090) -> 35, seg 3 (0.148807)
h("a_pyramidals_48[35].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[247] = new AMPA_syn(0.148807)")
h("a_pyramidals_48[6].soma a_pyramidals_48[35].synlist.append(new NetCon(&v(0.87809044), syn_SmallNet_pyr_pyr_AMPA_syn[247], 0.0, 0.0, 1.0))")
# Connection 248: 17, seg 0 (0.204062) -> 35, seg 3 (0.102960)
h("a_pyramidals_48[35].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[248] = new AMPA_syn(0.102960)")
h("a_pyramidals_48[17].soma a_pyramidals_48[35].synlist.append(new NetCon(&v(0.20406228), syn_SmallNet_pyr_pyr_AMPA_syn[248], 0.0, 0.0, 1.0))")
# Connection 249: 23, seg 0 (0.974759) -> 35, seg 3 (0.694610)
h("a_pyramidals_48[35].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[249] = new AMPA_syn(0.694610)")
h("a_pyramidals_48[23].soma a_pyramidals_48[35].synlist.append(new NetCon(&v(0.9747593), syn_SmallNet_pyr_pyr_AMPA_syn[249], 0.0, 0.0, 1.0))")
# Connection 250: 46, seg 0 (0.212438) -> 35, seg 3 (0.230402)
h("a_pyramidals_48[35].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[250] = new AMPA_syn(0.230402)")
h("a_pyramidals_48[46].soma a_pyramidals_48[35].synlist.append(new NetCon(&v(0.21243829), syn_SmallNet_pyr_pyr_AMPA_syn[250], 0.0, 0.0, 1.0))")
# Connection 251: 18, seg 0 (0.881779) -> 35, seg 3 (0.424217)
h("a_pyramidals_48[35].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[251] = new AMPA_syn(0.424217)")
h("a_pyramidals_48[18].soma a_pyramidals_48[35].synlist.append(new NetCon(&v(0.88177925), syn_SmallNet_pyr_pyr_AMPA_syn[251], 0.0, 0.0, 1.0))")
# Connection 252: 20, seg 0 (0.261422) -> 36, seg 3 (0.999961)
h("a_pyramidals_48[36].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[252] = new AMPA_syn(0.999961)")
h("a_pyramidals_48[20].soma a_pyramidals_48[36].synlist.append(new NetCon(&v(0.26142228), syn_SmallNet_pyr_pyr_AMPA_syn[252], 0.0, 0.0, 1.0))")
# Connection 253: 15, seg 0 (0.229657) -> 36, seg 3 (0.437683)
h("a_pyramidals_48[36].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[253] = new AMPA_syn(0.437683)")
h("a_pyramidals_48[15].soma a_pyramidals_48[36].synlist.append(new NetCon(&v(0.22965747), syn_SmallNet_pyr_pyr_AMPA_syn[253], 0.0, 0.0, 1.0))")
# Connection 254: 8, seg 0 (0.203884) -> 36, seg 3 (0.634228)
h("a_pyramidals_48[36].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[254] = new AMPA_syn(0.634228)")
h("a_pyramidals_48[8].soma a_pyramidals_48[36].synlist.append(new NetCon(&v(0.20388448), syn_SmallNet_pyr_pyr_AMPA_syn[254], 0.0, 0.0, 1.0))")
# Connection 255: 25, seg 0 (0.647456) -> 36, seg 3 (0.996146)
h("a_pyramidals_48[36].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[255] = new AMPA_syn(0.996146)")
h("a_pyramidals_48[25].soma a_pyramidals_48[36].synlist.append(new NetCon(&v(0.647456), syn_SmallNet_pyr_pyr_AMPA_syn[255], 0.0, 0.0, 1.0))")
# Connection 256: 10, seg 0 (0.149346) -> 36, seg 3 (0.919122)
h("a_pyramidals_48[36].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[256] = new AMPA_syn(0.919122)")
h("a_pyramidals_48[10].soma a_pyramidals_48[36].synlist.append(new NetCon(&v(0.14934552), syn_SmallNet_pyr_pyr_AMPA_syn[256], 0.0, 0.0, 1.0))")
# Connection 257: 22, seg 0 (0.669426) -> 36, seg 3 (0.122766)
h("a_pyramidals_48[36].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[257] = new AMPA_syn(0.122766)")
h("a_pyramidals_48[22].soma a_pyramidals_48[36].synlist.append(new NetCon(&v(0.66942585), syn_SmallNet_pyr_pyr_AMPA_syn[257], 0.0, 0.0, 1.0))")
# Connection 258: 28, seg 0 (0.034479) -> 36, seg 3 (0.203555)
h("a_pyramidals_48[36].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[258] = new AMPA_syn(0.203555)")
h("a_pyramidals_48[28].soma a_pyramidals_48[36].synlist.append(new NetCon(&v(0.03447944), syn_SmallNet_pyr_pyr_AMPA_syn[258], 0.0, 0.0, 1.0))")
# Connection 259: 40, seg 0 (0.349760) -> 37, seg 3 (0.760138)
h("a_pyramidals_48[37].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[259] = new AMPA_syn(0.760138)")
h("a_pyramidals_48[40].soma a_pyramidals_48[37].synlist.append(new NetCon(&v(0.34975964), syn_SmallNet_pyr_pyr_AMPA_syn[259], 0.0, 0.0, 1.0))")
# Connection 260: 9, seg 0 (0.972972) -> 37, seg 3 (0.823652)
h("a_pyramidals_48[37].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[260] = new AMPA_syn(0.823652)")
h("a_pyramidals_48[9].soma a_pyramidals_48[37].synlist.append(new NetCon(&v(0.97297245), syn_SmallNet_pyr_pyr_AMPA_syn[260], 0.0, 0.0, 1.0))")
# Connection 261: 42, seg 0 (0.741131) -> 37, seg 3 (0.908212)
h("a_pyramidals_48[37].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[261] = new AMPA_syn(0.908212)")
h("a_pyramidals_48[42].soma a_pyramidals_48[37].synlist.append(new NetCon(&v(0.7411314), syn_SmallNet_pyr_pyr_AMPA_syn[261], 0.0, 0.0, 1.0))")
# Connection 262: 10, seg 0 (0.316445) -> 37, seg 3 (0.462251)
h("a_pyramidals_48[37].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[262] = new AMPA_syn(0.462251)")
h("a_pyramidals_48[10].soma a_pyramidals_48[37].synlist.append(new NetCon(&v(0.3164447), syn_SmallNet_pyr_pyr_AMPA_syn[262], 0.0, 0.0, 1.0))")
# Connection 263: 21, seg 0 (0.862785) -> 37, seg 3 (0.657007)
h("a_pyramidals_48[37].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[263] = new AMPA_syn(0.657007)")
h("a_pyramidals_48[21].soma a_pyramidals_48[37].synlist.append(new NetCon(&v(0.8627846), syn_SmallNet_pyr_pyr_AMPA_syn[263], 0.0, 0.0, 1.0))")
# Connection 264: 20, seg 0 (0.201530) -> 37, seg 3 (0.376135)
h("a_pyramidals_48[37].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[264] = new AMPA_syn(0.376135)")
h("a_pyramidals_48[20].soma a_pyramidals_48[37].synlist.append(new NetCon(&v(0.20153022), syn_SmallNet_pyr_pyr_AMPA_syn[264], 0.0, 0.0, 1.0))")
# Connection 265: 35, seg 0 (0.600572) -> 37, seg 3 (0.543892)
h("a_pyramidals_48[37].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[265] = new AMPA_syn(0.543892)")
h("a_pyramidals_48[35].soma a_pyramidals_48[37].synlist.append(new NetCon(&v(0.600572), syn_SmallNet_pyr_pyr_AMPA_syn[265], 0.0, 0.0, 1.0))")
# Connection 266: 24, seg 0 (0.494342) -> 38, seg 3 (0.719091)
h("a_pyramidals_48[38].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[266] = new AMPA_syn(0.719091)")
h("a_pyramidals_48[24].soma a_pyramidals_48[38].synlist.append(new NetCon(&v(0.4943418), syn_SmallNet_pyr_pyr_AMPA_syn[266], 0.0, 0.0, 1.0))")
# Connection 267: 17, seg 0 (0.107951) -> 38, seg 3 (0.954620)
h("a_pyramidals_48[38].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[267] = new AMPA_syn(0.954620)")
h("a_pyramidals_48[17].soma a_pyramidals_48[38].synlist.append(new NetCon(&v(0.10795069), syn_SmallNet_pyr_pyr_AMPA_syn[267], 0.0, 0.0, 1.0))")
# Connection 268: 14, seg 0 (0.389825) -> 38, seg 3 (0.797818)
h("a_pyramidals_48[38].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[268] = new AMPA_syn(0.797818)")
h("a_pyramidals_48[14].soma a_pyramidals_48[38].synlist.append(new NetCon(&v(0.3898251), syn_SmallNet_pyr_pyr_AMPA_syn[268], 0.0, 0.0, 1.0))")
# Connection 269: 25, seg 0 (0.775042) -> 38, seg 3 (0.711860)
h("a_pyramidals_48[38].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[269] = new AMPA_syn(0.711860)")
h("a_pyramidals_48[25].soma a_pyramidals_48[38].synlist.append(new NetCon(&v(0.77504164), syn_SmallNet_pyr_pyr_AMPA_syn[269], 0.0, 0.0, 1.0))")
# Connection 270: 28, seg 0 (0.965975) -> 38, seg 3 (0.015361)
h("a_pyramidals_48[38].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[270] = new AMPA_syn(0.015361)")
h("a_pyramidals_48[28].soma a_pyramidals_48[38].synlist.append(new NetCon(&v(0.96597534), syn_SmallNet_pyr_pyr_AMPA_syn[270], 0.0, 0.0, 1.0))")
# Connection 271: 22, seg 0 (0.382660) -> 38, seg 3 (0.210442)
h("a_pyramidals_48[38].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[271] = new AMPA_syn(0.210442)")
h("a_pyramidals_48[22].soma a_pyramidals_48[38].synlist.append(new NetCon(&v(0.38266015), syn_SmallNet_pyr_pyr_AMPA_syn[271], 0.0, 0.0, 1.0))")
# Connection 272: 36, seg 0 (0.887970) -> 38, seg 3 (0.395174)
h("a_pyramidals_48[38].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[272] = new AMPA_syn(0.395174)")
h("a_pyramidals_48[36].soma a_pyramidals_48[38].synlist.append(new NetCon(&v(0.88796985), syn_SmallNet_pyr_pyr_AMPA_syn[272], 0.0, 0.0, 1.0))")
# Connection 273: 12, seg 0 (0.833270) -> 39, seg 3 (0.119896)
h("a_pyramidals_48[39].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[273] = new AMPA_syn(0.119896)")
h("a_pyramidals_48[12].soma a_pyramidals_48[39].synlist.append(new NetCon(&v(0.8332701), syn_SmallNet_pyr_pyr_AMPA_syn[273], 0.0, 0.0, 1.0))")
# Connection 274: 38, seg 0 (0.114645) -> 39, seg 3 (0.366464)
h("a_pyramidals_48[39].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[274] = new AMPA_syn(0.366464)")
h("a_pyramidals_48[38].soma a_pyramidals_48[39].synlist.append(new NetCon(&v(0.114644706), syn_SmallNet_pyr_pyr_AMPA_syn[274], 0.0, 0.0, 1.0))")
# Connection 275: 46, seg 0 (0.780063) -> 39, seg 3 (0.429806)
h("a_pyramidals_48[39].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[275] = new AMPA_syn(0.429806)")
h("a_pyramidals_48[46].soma a_pyramidals_48[39].synlist.append(new NetCon(&v(0.78006303), syn_SmallNet_pyr_pyr_AMPA_syn[275], 0.0, 0.0, 1.0))")
# Connection 276: 14, seg 0 (0.083813) -> 39, seg 3 (0.227663)
h("a_pyramidals_48[39].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[276] = new AMPA_syn(0.227663)")
h("a_pyramidals_48[14].soma a_pyramidals_48[39].synlist.append(new NetCon(&v(0.083812654), syn_SmallNet_pyr_pyr_AMPA_syn[276], 0.0, 0.0, 1.0))")
# Connection 277: 28, seg 0 (0.717299) -> 39, seg 3 (0.157208)
h("a_pyramidals_48[39].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[277] = new AMPA_syn(0.157208)")
h("a_pyramidals_48[28].soma a_pyramidals_48[39].synlist.append(new NetCon(&v(0.71729916), syn_SmallNet_pyr_pyr_AMPA_syn[277], 0.0, 0.0, 1.0))")
# Connection 278: 40, seg 0 (0.757864) -> 39, seg 3 (0.906234)
h("a_pyramidals_48[39].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[278] = new AMPA_syn(0.906234)")
h("a_pyramidals_48[40].soma a_pyramidals_48[39].synlist.append(new NetCon(&v(0.7578642), syn_SmallNet_pyr_pyr_AMPA_syn[278], 0.0, 0.0, 1.0))")
# Connection 279: 30, seg 0 (0.204641) -> 39, seg 3 (0.440216)
h("a_pyramidals_48[39].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[279] = new AMPA_syn(0.440216)")
h("a_pyramidals_48[30].soma a_pyramidals_48[39].synlist.append(new NetCon(&v(0.20464128), syn_SmallNet_pyr_pyr_AMPA_syn[279], 0.0, 0.0, 1.0))")
# Connection 280: 28, seg 0 (0.081111) -> 40, seg 3 (0.775727)
h("a_pyramidals_48[40].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[280] = new AMPA_syn(0.775727)")
h("a_pyramidals_48[28].soma a_pyramidals_48[40].synlist.append(new NetCon(&v(0.081110775), syn_SmallNet_pyr_pyr_AMPA_syn[280], 0.0, 0.0, 1.0))")
# Connection 281: 7, seg 0 (0.518965) -> 40, seg 3 (0.765490)
h("a_pyramidals_48[40].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[281] = new AMPA_syn(0.765490)")
h("a_pyramidals_48[7].soma a_pyramidals_48[40].synlist.append(new NetCon(&v(0.5189645), syn_SmallNet_pyr_pyr_AMPA_syn[281], 0.0, 0.0, 1.0))")
# Connection 282: 32, seg 0 (0.789266) -> 40, seg 3 (0.800599)
h("a_pyramidals_48[40].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[282] = new AMPA_syn(0.800599)")
h("a_pyramidals_48[32].soma a_pyramidals_48[40].synlist.append(new NetCon(&v(0.7892655), syn_SmallNet_pyr_pyr_AMPA_syn[282], 0.0, 0.0, 1.0))")
# Connection 283: 9, seg 0 (0.676462) -> 40, seg 3 (0.238307)
h("a_pyramidals_48[40].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[283] = new AMPA_syn(0.238307)")
h("a_pyramidals_48[9].soma a_pyramidals_48[40].synlist.append(new NetCon(&v(0.6764624), syn_SmallNet_pyr_pyr_AMPA_syn[283], 0.0, 0.0, 1.0))")
# Connection 284: 45, seg 0 (0.030378) -> 40, seg 3 (0.111218)
h("a_pyramidals_48[40].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[284] = new AMPA_syn(0.111218)")
h("a_pyramidals_48[45].soma a_pyramidals_48[40].synlist.append(new NetCon(&v(0.030377924), syn_SmallNet_pyr_pyr_AMPA_syn[284], 0.0, 0.0, 1.0))")
# Connection 285: 27, seg 0 (0.293170) -> 40, seg 3 (0.465996)
h("a_pyramidals_48[40].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[285] = new AMPA_syn(0.465996)")
h("a_pyramidals_48[27].soma a_pyramidals_48[40].synlist.append(new NetCon(&v(0.29316998), syn_SmallNet_pyr_pyr_AMPA_syn[285], 0.0, 0.0, 1.0))")
# Connection 286: 35, seg 0 (0.064700) -> 40, seg 3 (0.282371)
h("a_pyramidals_48[40].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[286] = new AMPA_syn(0.282371)")
h("a_pyramidals_48[35].soma a_pyramidals_48[40].synlist.append(new NetCon(&v(0.06469989), syn_SmallNet_pyr_pyr_AMPA_syn[286], 0.0, 0.0, 1.0))")
# Connection 287: 23, seg 0 (0.736067) -> 41, seg 3 (0.180953)
h("a_pyramidals_48[41].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[287] = new AMPA_syn(0.180953)")
h("a_pyramidals_48[23].soma a_pyramidals_48[41].synlist.append(new NetCon(&v(0.73606724), syn_SmallNet_pyr_pyr_AMPA_syn[287], 0.0, 0.0, 1.0))")
# Connection 288: 34, seg 0 (0.388290) -> 41, seg 3 (0.766422)
h("a_pyramidals_48[41].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[288] = new AMPA_syn(0.766422)")
h("a_pyramidals_48[34].soma a_pyramidals_48[41].synlist.append(new NetCon(&v(0.38828963), syn_SmallNet_pyr_pyr_AMPA_syn[288], 0.0, 0.0, 1.0))")
# Connection 289: 22, seg 0 (0.620667) -> 41, seg 3 (0.408596)
h("a_pyramidals_48[41].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[289] = new AMPA_syn(0.408596)")
h("a_pyramidals_48[22].soma a_pyramidals_48[41].synlist.append(new NetCon(&v(0.6206667), syn_SmallNet_pyr_pyr_AMPA_syn[289], 0.0, 0.0, 1.0))")
# Connection 290: 44, seg 0 (0.088273) -> 41, seg 3 (0.381545)
h("a_pyramidals_48[41].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[290] = new AMPA_syn(0.381545)")
h("a_pyramidals_48[44].soma a_pyramidals_48[41].synlist.append(new NetCon(&v(0.08827275), syn_SmallNet_pyr_pyr_AMPA_syn[290], 0.0, 0.0, 1.0))")
# Connection 291: 10, seg 0 (0.207460) -> 41, seg 3 (0.321389)
h("a_pyramidals_48[41].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[291] = new AMPA_syn(0.321389)")
h("a_pyramidals_48[10].soma a_pyramidals_48[41].synlist.append(new NetCon(&v(0.20745963), syn_SmallNet_pyr_pyr_AMPA_syn[291], 0.0, 0.0, 1.0))")
# Connection 292: 25, seg 0 (0.257323) -> 41, seg 3 (0.333205)
h("a_pyramidals_48[41].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[292] = new AMPA_syn(0.333205)")
h("a_pyramidals_48[25].soma a_pyramidals_48[41].synlist.append(new NetCon(&v(0.2573229), syn_SmallNet_pyr_pyr_AMPA_syn[292], 0.0, 0.0, 1.0))")
# Connection 293: 3, seg 0 (0.522486) -> 41, seg 3 (0.375205)
h("a_pyramidals_48[41].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[293] = new AMPA_syn(0.375205)")
h("a_pyramidals_48[3].soma a_pyramidals_48[41].synlist.append(new NetCon(&v(0.5224856), syn_SmallNet_pyr_pyr_AMPA_syn[293], 0.0, 0.0, 1.0))")
# Connection 294: 12, seg 0 (0.074331) -> 42, seg 3 (0.964804)
h("a_pyramidals_48[42].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[294] = new AMPA_syn(0.964804)")
h("a_pyramidals_48[12].soma a_pyramidals_48[42].synlist.append(new NetCon(&v(0.07433081), syn_SmallNet_pyr_pyr_AMPA_syn[294], 0.0, 0.0, 1.0))")
# Connection 295: 43, seg 0 (0.409678) -> 42, seg 3 (0.025816)
h("a_pyramidals_48[42].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[295] = new AMPA_syn(0.025816)")
h("a_pyramidals_48[43].soma a_pyramidals_48[42].synlist.append(new NetCon(&v(0.40967822), syn_SmallNet_pyr_pyr_AMPA_syn[295], 0.0, 0.0, 1.0))")
# Connection 296: 38, seg 0 (0.918514) -> 42, seg 3 (0.673011)
h("a_pyramidals_48[42].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[296] = new AMPA_syn(0.673011)")
h("a_pyramidals_48[38].soma a_pyramidals_48[42].synlist.append(new NetCon(&v(0.9185139), syn_SmallNet_pyr_pyr_AMPA_syn[296], 0.0, 0.0, 1.0))")
# Connection 297: 1, seg 0 (0.725064) -> 42, seg 3 (0.641952)
h("a_pyramidals_48[42].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[297] = new AMPA_syn(0.641952)")
h("a_pyramidals_48[1].soma a_pyramidals_48[42].synlist.append(new NetCon(&v(0.7250636), syn_SmallNet_pyr_pyr_AMPA_syn[297], 0.0, 0.0, 1.0))")
# Connection 298: 25, seg 0 (0.101940) -> 42, seg 3 (0.197133)
h("a_pyramidals_48[42].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[298] = new AMPA_syn(0.197133)")
h("a_pyramidals_48[25].soma a_pyramidals_48[42].synlist.append(new NetCon(&v(0.101940095), syn_SmallNet_pyr_pyr_AMPA_syn[298], 0.0, 0.0, 1.0))")
# Connection 299: 3, seg 0 (0.979809) -> 42, seg 3 (0.726419)
h("a_pyramidals_48[42].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[299] = new AMPA_syn(0.726419)")
h("a_pyramidals_48[3].soma a_pyramidals_48[42].synlist.append(new NetCon(&v(0.9798087), syn_SmallNet_pyr_pyr_AMPA_syn[299], 0.0, 0.0, 1.0))")
# Connection 300: 16, seg 0 (0.538739) -> 42, seg 3 (0.690729)
h("a_pyramidals_48[42].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[300] = new AMPA_syn(0.690729)")
h("a_pyramidals_48[16].soma a_pyramidals_48[42].synlist.append(new NetCon(&v(0.5387391), syn_SmallNet_pyr_pyr_AMPA_syn[300], 0.0, 0.0, 1.0))")
# Connection 301: 42, seg 0 (0.893024) -> 43, seg 3 (0.513875)
h("a_pyramidals_48[43].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[301] = new AMPA_syn(0.513875)")
h("a_pyramidals_48[42].soma a_pyramidals_48[43].synlist.append(new NetCon(&v(0.8930242), syn_SmallNet_pyr_pyr_AMPA_syn[301], 0.0, 0.0, 1.0))")
# Connection 302: 39, seg 0 (0.052388) -> 43, seg 3 (0.023943)
h("a_pyramidals_48[43].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[302] = new AMPA_syn(0.023943)")
h("a_pyramidals_48[39].soma a_pyramidals_48[43].synlist.append(new NetCon(&v(0.052388012), syn_SmallNet_pyr_pyr_AMPA_syn[302], 0.0, 0.0, 1.0))")
# Connection 303: 46, seg 0 (0.451864) -> 43, seg 3 (0.763226)
h("a_pyramidals_48[43].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[303] = new AMPA_syn(0.763226)")
h("a_pyramidals_48[46].soma a_pyramidals_48[43].synlist.append(new NetCon(&v(0.4518637), syn_SmallNet_pyr_pyr_AMPA_syn[303], 0.0, 0.0, 1.0))")
# Connection 304: 26, seg 0 (0.105777) -> 43, seg 3 (0.614405)
h("a_pyramidals_48[43].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[304] = new AMPA_syn(0.614405)")
h("a_pyramidals_48[26].soma a_pyramidals_48[43].synlist.append(new NetCon(&v(0.105777085), syn_SmallNet_pyr_pyr_AMPA_syn[304], 0.0, 0.0, 1.0))")
# Connection 305: 27, seg 0 (0.250319) -> 43, seg 3 (0.228736)
h("a_pyramidals_48[43].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[305] = new AMPA_syn(0.228736)")
h("a_pyramidals_48[27].soma a_pyramidals_48[43].synlist.append(new NetCon(&v(0.25031853), syn_SmallNet_pyr_pyr_AMPA_syn[305], 0.0, 0.0, 1.0))")
# Connection 306: 36, seg 0 (0.967411) -> 43, seg 3 (0.209049)
h("a_pyramidals_48[43].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[306] = new AMPA_syn(0.209049)")
h("a_pyramidals_48[36].soma a_pyramidals_48[43].synlist.append(new NetCon(&v(0.9674108), syn_SmallNet_pyr_pyr_AMPA_syn[306], 0.0, 0.0, 1.0))")
# Connection 307: 13, seg 0 (0.829278) -> 43, seg 3 (0.935202)
h("a_pyramidals_48[43].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[307] = new AMPA_syn(0.935202)")
h("a_pyramidals_48[13].soma a_pyramidals_48[43].synlist.append(new NetCon(&v(0.8292777), syn_SmallNet_pyr_pyr_AMPA_syn[307], 0.0, 0.0, 1.0))")
# Connection 308: 13, seg 0 (0.410459) -> 44, seg 3 (0.711032)
h("a_pyramidals_48[44].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[308] = new AMPA_syn(0.711032)")
h("a_pyramidals_48[13].soma a_pyramidals_48[44].synlist.append(new NetCon(&v(0.41045874), syn_SmallNet_pyr_pyr_AMPA_syn[308], 0.0, 0.0, 1.0))")
# Connection 309: 12, seg 0 (0.852950) -> 44, seg 3 (0.957588)
h("a_pyramidals_48[44].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[309] = new AMPA_syn(0.957588)")
h("a_pyramidals_48[12].soma a_pyramidals_48[44].synlist.append(new NetCon(&v(0.8529497), syn_SmallNet_pyr_pyr_AMPA_syn[309], 0.0, 0.0, 1.0))")
# Connection 310: 20, seg 0 (0.100789) -> 44, seg 3 (0.815850)
h("a_pyramidals_48[44].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[310] = new AMPA_syn(0.815850)")
h("a_pyramidals_48[20].soma a_pyramidals_48[44].synlist.append(new NetCon(&v(0.10078859), syn_SmallNet_pyr_pyr_AMPA_syn[310], 0.0, 0.0, 1.0))")
# Connection 311: 26, seg 0 (0.496137) -> 44, seg 3 (0.250493)
h("a_pyramidals_48[44].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[311] = new AMPA_syn(0.250493)")
h("a_pyramidals_48[26].soma a_pyramidals_48[44].synlist.append(new NetCon(&v(0.4961372), syn_SmallNet_pyr_pyr_AMPA_syn[311], 0.0, 0.0, 1.0))")
# Connection 312: 3, seg 0 (0.960596) -> 44, seg 3 (0.757158)
h("a_pyramidals_48[44].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[312] = new AMPA_syn(0.757158)")
h("a_pyramidals_48[3].soma a_pyramidals_48[44].synlist.append(new NetCon(&v(0.96059614), syn_SmallNet_pyr_pyr_AMPA_syn[312], 0.0, 0.0, 1.0))")
# Connection 313: 36, seg 0 (0.208383) -> 44, seg 3 (0.367918)
h("a_pyramidals_48[44].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[313] = new AMPA_syn(0.367918)")
h("a_pyramidals_48[36].soma a_pyramidals_48[44].synlist.append(new NetCon(&v(0.20838344), syn_SmallNet_pyr_pyr_AMPA_syn[313], 0.0, 0.0, 1.0))")
# Connection 314: 42, seg 0 (0.527080) -> 44, seg 3 (0.174498)
h("a_pyramidals_48[44].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[314] = new AMPA_syn(0.174498)")
h("a_pyramidals_48[42].soma a_pyramidals_48[44].synlist.append(new NetCon(&v(0.52707994), syn_SmallNet_pyr_pyr_AMPA_syn[314], 0.0, 0.0, 1.0))")
# Connection 315: 34, seg 0 (0.896081) -> 45, seg 3 (0.503327)
h("a_pyramidals_48[45].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[315] = new AMPA_syn(0.503327)")
h("a_pyramidals_48[34].soma a_pyramidals_48[45].synlist.append(new NetCon(&v(0.89608115), syn_SmallNet_pyr_pyr_AMPA_syn[315], 0.0, 0.0, 1.0))")
# Connection 316: 2, seg 0 (0.533412) -> 45, seg 3 (0.036741)
h("a_pyramidals_48[45].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[316] = new AMPA_syn(0.036741)")
h("a_pyramidals_48[2].soma a_pyramidals_48[45].synlist.append(new NetCon(&v(0.5334115), syn_SmallNet_pyr_pyr_AMPA_syn[316], 0.0, 0.0, 1.0))")
# Connection 317: 39, seg 0 (0.325415) -> 45, seg 3 (0.020383)
h("a_pyramidals_48[45].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[317] = new AMPA_syn(0.020383)")
h("a_pyramidals_48[39].soma a_pyramidals_48[45].synlist.append(new NetCon(&v(0.32541507), syn_SmallNet_pyr_pyr_AMPA_syn[317], 0.0, 0.0, 1.0))")
# Connection 318: 47, seg 0 (0.235414) -> 45, seg 3 (0.429490)
# Remaining SmallNet_pyr_pyr AMPA connections (synapse indices 318-335),
# emitted as a data table + loop instead of the generator's literal expansion.
# Each tuple: (synapse index, source pyramidal cell, source v() position,
#             target pyramidal cell, synapse position on the target apical3).
# Every source is a pyramidal soma (seg 0) and every target an apical3
# section (seg 3).  Positions are kept as the generator's exact literal
# strings (including trailing zeros) so the hoc statements passed to h()
# are byte-identical to the original expansion.
_PYR_PYR_TAIL = [
    (318, 47, "0.23541397", 45, "0.429490"),
    (319, 15, "0.6387353", 45, "0.012862"),
    (320, 37, "0.3566411", 45, "0.308324"),
    (321, 24, "0.65496254", 45, "0.761174"),
    (322, 33, "0.0993343", 46, "0.443513"),
    (323, 24, "0.9593079", 46, "0.881139"),
    (324, 1, "0.06412631", 46, "0.241207"),
    (325, 35, "0.9667772", 46, "0.349851"),
    (326, 21, "0.9599483", 46, "0.074073"),
    (327, 12, "0.9944133", 46, "0.063003"),
    (328, 41, "0.061624944", 46, "0.662453"),
    (329, 11, "0.8762219", 47, "0.879103"),
    (330, 16, "0.81709784", 47, "0.675312"),
    (331, 12, "0.45441592", 47, "0.127329"),
    (332, 4, "0.2957561", 47, "0.751381"),
    (333, 15, "0.24615383", 47, "0.022809"),
    (334, 17, "0.50812805", 47, "0.296264"),
    (335, 2, "0.060587943", 47, "0.859754"),
]
for _i, _src, _srcpos, _tgt, _tgtpos in _PYR_PYR_TAIL:
    # Create the AMPA synapse at _tgtpos on the target cell's apical3 section.
    h("a_pyramidals_48[%d].apical3 syn_SmallNet_pyr_pyr_AMPA_syn[%d] = new AMPA_syn(%s)"
      % (_tgt, _i, _tgtpos))
    # Connect the source soma membrane potential (watched at _srcpos) to the
    # synapse via a NetCon with threshold 0.0, delay 0.0, weight 1.0.
    h("a_pyramidals_48[%d].soma a_pyramidals_48[%d].synlist.append(new NetCon(&v(%s), syn_SmallNet_pyr_pyr_AMPA_syn[%d], 0.0, 0.0, 1.0))"
      % (_src, _tgt, _srcpos, _i))
# Adding projection: SmallNet_pyr_bask, from pyramidals_48 to baskets_12 with synapse AMPA_syn_inh, 252 connection(s)
h("objectvar syn_SmallNet_pyr_bask_AMPA_syn_inh[252]")
# Connections 0-102 of the SmallNet_pyr_bask projection, emitted as a data
# table + loop (connections 103-251 follow below in the original expanded
# form and are created after this loop runs, preserving the original order).
# Each tuple: (synapse index, source pyramidal cell, source v() position,
#             target basket cell, synapse position on the target dend).
# Every source is a pyramidal soma (seg 0) and every target a basket dend
# (seg 1).  Positions are kept as the generator's exact literal strings
# (including trailing zeros) so the hoc statements passed to h() are
# byte-identical to the original expansion.
_PYR_BASK_CONNS = [
    (0, 1, "0.8973388", 0, "0.094951"),
    (1, 45, "0.928107", 0, "0.577109"),
    (2, 1, "0.36536598", 0, "0.815178"),
    (3, 42, "0.09912586", 0, "0.171880"),
    (4, 30, "0.94035894", 0, "0.937216"),
    (5, 2, "0.28186047", 0, "0.045011"),
    (6, 29, "0.21337652", 0, "0.679890"),
    (7, 21, "0.5683119", 0, "0.895162"),
    (8, 38, "0.7676357", 0, "0.582744"),
    (9, 43, "0.6475289", 0, "0.869461"),
    (10, 20, "0.2326436", 0, "0.630194"),
    (11, 26, "0.259588", 0, "0.538412"),
    (12, 18, "0.9969277", 0, "0.275701"),
    (13, 42, "0.44994545", 0, "0.445524"),
    (14, 21, "0.8008774", 0, "0.750893"),
    (15, 6, "0.093914986", 0, "0.297882"),
    (16, 7, "0.69858676", 0, "0.187216"),
    (17, 40, "0.5276436", 0, "0.523449"),
    (18, 14, "0.9948565", 0, "0.077084"),
    (19, 40, "0.70680064", 0, "0.781289"),
    (20, 21, "0.58268833", 0, "0.430303"),
    (21, 12, "0.97471064", 1, "0.708611"),
    (22, 10, "0.9545588", 1, "0.722530"),
    (23, 47, "0.92958057", 1, "0.824415"),
    (24, 0, "0.71230567", 1, "0.558898"),
    (25, 0, "0.6168574", 1, "0.481301"),
    (26, 14, "0.19162786", 1, "0.411232"),
    (27, 45, "0.2877642", 1, "0.556133"),
    (28, 21, "0.27748585", 1, "0.012926"),
    (29, 10, "0.48452055", 1, "0.828493"),
    (30, 40, "0.33807498", 1, "0.820879"),
    (31, 45, "0.51724976", 1, "0.785142"),
    (32, 2, "0.2940765", 1, "0.153832"),
    (33, 33, "0.49367404", 1, "0.946857"),
    (34, 12, "0.9380237", 1, "0.689628"),
    (35, 39, "0.10951692", 1, "0.688540"),
    (36, 10, "0.44476712", 1, "0.993809"),
    (37, 46, "0.8557138", 1, "0.792239"),
    (38, 35, "0.7018662", 1, "0.621168"),
    (39, 27, "0.042401433", 1, "0.921847"),
    (40, 29, "0.5403337", 1, "0.469799"),
    (41, 27, "0.47711694", 1, "0.813668"),
    (42, 25, "0.98189616", 2, "0.858656"),
    (43, 20, "0.37300307", 2, "0.557205"),
    (44, 40, "0.55277145", 2, "0.772237"),
    (45, 32, "0.10276121", 2, "0.614172"),
    (46, 26, "0.9516609", 2, "0.631097"),
    (47, 28, "0.98730963", 2, "0.528144"),
    (48, 38, "0.8363129", 2, "0.289884"),
    (49, 0, "0.13838774", 2, "0.668090"),
    (50, 33, "0.23516619", 2, "0.636235"),
    (51, 5, "0.9752406", 2, "0.524322"),
    (52, 10, "0.90666354", 2, "0.766906"),
    (53, 11, "0.7516511", 2, "0.533452"),
    (54, 6, "0.240767", 2, "0.676068"),
    (55, 47, "0.27241004", 2, "0.968684"),
    (56, 31, "0.06444234", 2, "0.236569"),
    (57, 21, "0.90083957", 2, "0.213913"),
    (58, 11, "0.7090682", 2, "0.753454"),
    (59, 28, "0.05773765", 2, "0.925070"),
    (60, 38, "0.71894747", 2, "0.757352"),
    (61, 29, "0.6383672", 2, "0.729665"),
    (62, 10, "0.5322894", 2, "0.153902"),
    (63, 8, "0.080227315", 3, "0.550819"),
    (64, 4, "0.65801805", 3, "0.350687"),
    (65, 32, "0.25460225", 3, "0.380070"),
    (66, 38, "0.58904403", 3, "0.242937"),
    (67, 30, "0.5042998", 3, "0.222742"),
    (68, 14, "0.8058507", 3, "0.353359"),
    (69, 4, "0.29795378", 3, "0.248163"),
    (70, 19, "0.5549486", 3, "0.208103"),
    (71, 14, "0.8908856", 3, "0.344055"),
    (72, 11, "0.4719352", 3, "0.539425"),
    (73, 9, "0.07020348", 3, "0.881802"),
    (74, 44, "0.58438766", 3, "0.551986"),
    (75, 2, "0.68531764", 3, "0.374495"),
    (76, 44, "0.40146828", 3, "0.333553"),
    (77, 34, "0.05205071", 3, "0.757551"),
    (78, 25, "0.6474655", 3, "0.318888"),
    (79, 24, "0.66154474", 3, "0.027938"),
    (80, 2, "0.44322294", 3, "0.586239"),
    (81, 42, "0.6722885", 3, "0.608158"),
    (82, 17, "0.68090624", 3, "0.318105"),
    (83, 33, "0.508318", 3, "0.234996"),
    (84, 36, "0.6293104", 4, "0.177332"),
    (85, 42, "0.56390774", 4, "0.286162"),
    (86, 42, "0.6515159", 4, "0.730184"),
    (87, 47, "0.454175", 4, "0.178128"),
    (88, 35, "0.35885674", 4, "0.982891"),
    (89, 0, "0.7300485", 4, "0.905017"),
    (90, 19, "0.011776328", 4, "0.672855"),
    (91, 21, "0.06600875", 4, "0.951158"),
    (92, 21, "0.0624969", 4, "0.916117"),
    (93, 18, "0.009775281", 4, "0.202473"),
    (94, 4, "0.7720471", 4, "0.518454"),
    (95, 0, "0.15690327", 4, "0.573290"),
    (96, 34, "0.73679984", 4, "0.129969"),
    (97, 40, "0.6352252", 4, "0.612864"),
    (98, 27, "0.74243915", 4, "0.937034"),
    (99, 27, "0.8732408", 4, "0.770918"),
    (100, 22, "0.5391935", 4, "0.450345"),
    (101, 32, "0.11419928", 4, "0.416393"),
    (102, 10, "0.17628932", 4, "0.965680"),
]
for _i, _src, _srcpos, _tgt, _tgtpos in _PYR_BASK_CONNS:
    # Create the AMPA_syn_inh synapse at _tgtpos on the basket cell's dend.
    h("a_baskets_12[%d].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[%d] = new AMPA_syn_inh(%s)"
      % (_tgt, _i, _tgtpos))
    # Drive it from the pyramidal soma membrane potential (watched at
    # _srcpos) via a NetCon with threshold 0.0, delay 0.0, weight 1.0.
    h("a_pyramidals_48[%d].soma a_baskets_12[%d].synlist.append(new NetCon(&v(%s), syn_SmallNet_pyr_bask_AMPA_syn_inh[%d], 0.0, 0.0, 1.0))"
      % (_src, _tgt, _srcpos, _i))
# Connection 103: 8, seg 0 (0.253735) -> 4, seg 1 (0.121998)
h("a_baskets_12[4].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[103] = new AMPA_syn_inh(0.121998)")
h("a_pyramidals_48[8].soma a_baskets_12[4].synlist.append(new NetCon(&v(0.2537347), syn_SmallNet_pyr_bask_AMPA_syn_inh[103], 0.0, 0.0, 1.0))")
# Connection 104: 12, seg 0 (0.858451) -> 4, seg 1 (0.457122)
h("a_baskets_12[4].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[104] = new AMPA_syn_inh(0.457122)")
h("a_pyramidals_48[12].soma a_baskets_12[4].synlist.append(new NetCon(&v(0.858451), syn_SmallNet_pyr_bask_AMPA_syn_inh[104], 0.0, 0.0, 1.0))")
# Connection 105: 35, seg 0 (0.871568) -> 5, seg 1 (0.602321)
h("a_baskets_12[5].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[105] = new AMPA_syn_inh(0.602321)")
h("a_pyramidals_48[35].soma a_baskets_12[5].synlist.append(new NetCon(&v(0.8715681), syn_SmallNet_pyr_bask_AMPA_syn_inh[105], 0.0, 0.0, 1.0))")
# Connection 106: 41, seg 0 (0.639793) -> 5, seg 1 (0.536025)
h("a_baskets_12[5].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[106] = new AMPA_syn_inh(0.536025)")
h("a_pyramidals_48[41].soma a_baskets_12[5].synlist.append(new NetCon(&v(0.639793), syn_SmallNet_pyr_bask_AMPA_syn_inh[106], 0.0, 0.0, 1.0))")
# Connection 107: 34, seg 0 (0.853258) -> 5, seg 1 (0.502481)
h("a_baskets_12[5].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[107] = new AMPA_syn_inh(0.502481)")
h("a_pyramidals_48[34].soma a_baskets_12[5].synlist.append(new NetCon(&v(0.8532583), syn_SmallNet_pyr_bask_AMPA_syn_inh[107], 0.0, 0.0, 1.0))")
# Connection 108: 43, seg 0 (0.978988) -> 5, seg 1 (0.717942)
h("a_baskets_12[5].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[108] = new AMPA_syn_inh(0.717942)")
h("a_pyramidals_48[43].soma a_baskets_12[5].synlist.append(new NetCon(&v(0.9789884), syn_SmallNet_pyr_bask_AMPA_syn_inh[108], 0.0, 0.0, 1.0))")
# Connection 109: 13, seg 0 (0.114131) -> 5, seg 1 (0.083959)
h("a_baskets_12[5].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[109] = new AMPA_syn_inh(0.083959)")
h("a_pyramidals_48[13].soma a_baskets_12[5].synlist.append(new NetCon(&v(0.114130795), syn_SmallNet_pyr_bask_AMPA_syn_inh[109], 0.0, 0.0, 1.0))")
# Connection 110: 44, seg 0 (0.049081) -> 5, seg 1 (0.224031)
h("a_baskets_12[5].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[110] = new AMPA_syn_inh(0.224031)")
h("a_pyramidals_48[44].soma a_baskets_12[5].synlist.append(new NetCon(&v(0.049081385), syn_SmallNet_pyr_bask_AMPA_syn_inh[110], 0.0, 0.0, 1.0))")
# Connection 111: 38, seg 0 (0.875773) -> 5, seg 1 (0.869222)
h("a_baskets_12[5].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[111] = new AMPA_syn_inh(0.869222)")
h("a_pyramidals_48[38].soma a_baskets_12[5].synlist.append(new NetCon(&v(0.875773), syn_SmallNet_pyr_bask_AMPA_syn_inh[111], 0.0, 0.0, 1.0))")
# Connection 112: 40, seg 0 (0.366652) -> 5, seg 1 (0.737899)
h("a_baskets_12[5].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[112] = new AMPA_syn_inh(0.737899)")
h("a_pyramidals_48[40].soma a_baskets_12[5].synlist.append(new NetCon(&v(0.3666525), syn_SmallNet_pyr_bask_AMPA_syn_inh[112], 0.0, 0.0, 1.0))")
# Connection 113: 3, seg 0 (0.931377) -> 5, seg 1 (0.527977)
h("a_baskets_12[5].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[113] = new AMPA_syn_inh(0.527977)")
h("a_pyramidals_48[3].soma a_baskets_12[5].synlist.append(new NetCon(&v(0.9313769), syn_SmallNet_pyr_bask_AMPA_syn_inh[113], 0.0, 0.0, 1.0))")
# Connection 114: 26, seg 0 (0.364103) -> 5, seg 1 (0.898691)
h("a_baskets_12[5].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[114] = new AMPA_syn_inh(0.898691)")
h("a_pyramidals_48[26].soma a_baskets_12[5].synlist.append(new NetCon(&v(0.36410314), syn_SmallNet_pyr_bask_AMPA_syn_inh[114], 0.0, 0.0, 1.0))")
# Connection 115: 26, seg 0 (0.871301) -> 5, seg 1 (0.017656)
h("a_baskets_12[5].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[115] = new AMPA_syn_inh(0.017656)")
h("a_pyramidals_48[26].soma a_baskets_12[5].synlist.append(new NetCon(&v(0.871301), syn_SmallNet_pyr_bask_AMPA_syn_inh[115], 0.0, 0.0, 1.0))")
# Connection 116: 16, seg 0 (0.368756) -> 5, seg 1 (0.557972)
h("a_baskets_12[5].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[116] = new AMPA_syn_inh(0.557972)")
h("a_pyramidals_48[16].soma a_baskets_12[5].synlist.append(new NetCon(&v(0.368756), syn_SmallNet_pyr_bask_AMPA_syn_inh[116], 0.0, 0.0, 1.0))")
# Connection 117: 37, seg 0 (0.252894) -> 5, seg 1 (0.997438)
h("a_baskets_12[5].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[117] = new AMPA_syn_inh(0.997438)")
h("a_pyramidals_48[37].soma a_baskets_12[5].synlist.append(new NetCon(&v(0.25289434), syn_SmallNet_pyr_bask_AMPA_syn_inh[117], 0.0, 0.0, 1.0))")
# Connection 118: 19, seg 0 (0.862172) -> 5, seg 1 (0.053659)
h("a_baskets_12[5].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[118] = new AMPA_syn_inh(0.053659)")
h("a_pyramidals_48[19].soma a_baskets_12[5].synlist.append(new NetCon(&v(0.8621721), syn_SmallNet_pyr_bask_AMPA_syn_inh[118], 0.0, 0.0, 1.0))")
# Connection 119: 45, seg 0 (0.614134) -> 5, seg 1 (0.246064)
h("a_baskets_12[5].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[119] = new AMPA_syn_inh(0.246064)")
h("a_pyramidals_48[45].soma a_baskets_12[5].synlist.append(new NetCon(&v(0.61413395), syn_SmallNet_pyr_bask_AMPA_syn_inh[119], 0.0, 0.0, 1.0))")
# Connection 120: 24, seg 0 (0.563139) -> 5, seg 1 (0.316421)
h("a_baskets_12[5].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[120] = new AMPA_syn_inh(0.316421)")
h("a_pyramidals_48[24].soma a_baskets_12[5].synlist.append(new NetCon(&v(0.5631389), syn_SmallNet_pyr_bask_AMPA_syn_inh[120], 0.0, 0.0, 1.0))")
# Connection 121: 12, seg 0 (0.605272) -> 5, seg 1 (0.592384)
h("a_baskets_12[5].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[121] = new AMPA_syn_inh(0.592384)")
h("a_pyramidals_48[12].soma a_baskets_12[5].synlist.append(new NetCon(&v(0.6052716), syn_SmallNet_pyr_bask_AMPA_syn_inh[121], 0.0, 0.0, 1.0))")
# Connection 122: 4, seg 0 (0.483966) -> 5, seg 1 (0.006549)
h("a_baskets_12[5].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[122] = new AMPA_syn_inh(0.006549)")
h("a_pyramidals_48[4].soma a_baskets_12[5].synlist.append(new NetCon(&v(0.4839663), syn_SmallNet_pyr_bask_AMPA_syn_inh[122], 0.0, 0.0, 1.0))")
# Connection 123: 38, seg 0 (0.720071) -> 5, seg 1 (0.693940)
h("a_baskets_12[5].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[123] = new AMPA_syn_inh(0.693940)")
h("a_pyramidals_48[38].soma a_baskets_12[5].synlist.append(new NetCon(&v(0.72007054), syn_SmallNet_pyr_bask_AMPA_syn_inh[123], 0.0, 0.0, 1.0))")
# Connection 124: 43, seg 0 (0.353159) -> 5, seg 1 (0.789961)
h("a_baskets_12[5].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[124] = new AMPA_syn_inh(0.789961)")
h("a_pyramidals_48[43].soma a_baskets_12[5].synlist.append(new NetCon(&v(0.353159), syn_SmallNet_pyr_bask_AMPA_syn_inh[124], 0.0, 0.0, 1.0))")
# Connection 125: 6, seg 0 (0.659941) -> 5, seg 1 (0.335950)
h("a_baskets_12[5].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[125] = new AMPA_syn_inh(0.335950)")
h("a_pyramidals_48[6].soma a_baskets_12[5].synlist.append(new NetCon(&v(0.6599409), syn_SmallNet_pyr_bask_AMPA_syn_inh[125], 0.0, 0.0, 1.0))")
# Connection 126: 33, seg 0 (0.748945) -> 6, seg 1 (0.677807)
h("a_baskets_12[6].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[126] = new AMPA_syn_inh(0.677807)")
h("a_pyramidals_48[33].soma a_baskets_12[6].synlist.append(new NetCon(&v(0.7489449), syn_SmallNet_pyr_bask_AMPA_syn_inh[126], 0.0, 0.0, 1.0))")
# Connection 127: 33, seg 0 (0.790566) -> 6, seg 1 (0.274267)
h("a_baskets_12[6].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[127] = new AMPA_syn_inh(0.274267)")
h("a_pyramidals_48[33].soma a_baskets_12[6].synlist.append(new NetCon(&v(0.79056585), syn_SmallNet_pyr_bask_AMPA_syn_inh[127], 0.0, 0.0, 1.0))")
# Connection 128: 24, seg 0 (0.602785) -> 6, seg 1 (0.124298)
h("a_baskets_12[6].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[128] = new AMPA_syn_inh(0.124298)")
h("a_pyramidals_48[24].soma a_baskets_12[6].synlist.append(new NetCon(&v(0.6027846), syn_SmallNet_pyr_bask_AMPA_syn_inh[128], 0.0, 0.0, 1.0))")
# Connection 129: 4, seg 0 (0.960588) -> 6, seg 1 (0.277064)
h("a_baskets_12[6].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[129] = new AMPA_syn_inh(0.277064)")
h("a_pyramidals_48[4].soma a_baskets_12[6].synlist.append(new NetCon(&v(0.96058846), syn_SmallNet_pyr_bask_AMPA_syn_inh[129], 0.0, 0.0, 1.0))")
# Connection 130: 22, seg 0 (0.770328) -> 6, seg 1 (0.132000)
h("a_baskets_12[6].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[130] = new AMPA_syn_inh(0.132000)")
h("a_pyramidals_48[22].soma a_baskets_12[6].synlist.append(new NetCon(&v(0.7703282), syn_SmallNet_pyr_bask_AMPA_syn_inh[130], 0.0, 0.0, 1.0))")
# Connection 131: 10, seg 0 (0.036765) -> 6, seg 1 (0.534666)
h("a_baskets_12[6].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[131] = new AMPA_syn_inh(0.534666)")
h("a_pyramidals_48[10].soma a_baskets_12[6].synlist.append(new NetCon(&v(0.036765218), syn_SmallNet_pyr_bask_AMPA_syn_inh[131], 0.0, 0.0, 1.0))")
# Connection 132: 14, seg 0 (0.975290) -> 6, seg 1 (0.924865)
h("a_baskets_12[6].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[132] = new AMPA_syn_inh(0.924865)")
h("a_pyramidals_48[14].soma a_baskets_12[6].synlist.append(new NetCon(&v(0.9752905), syn_SmallNet_pyr_bask_AMPA_syn_inh[132], 0.0, 0.0, 1.0))")
# Connection 133: 45, seg 0 (0.445946) -> 6, seg 1 (0.538696)
h("a_baskets_12[6].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[133] = new AMPA_syn_inh(0.538696)")
h("a_pyramidals_48[45].soma a_baskets_12[6].synlist.append(new NetCon(&v(0.44594598), syn_SmallNet_pyr_bask_AMPA_syn_inh[133], 0.0, 0.0, 1.0))")
# Connection 134: 4, seg 0 (0.103555) -> 6, seg 1 (0.530809)
h("a_baskets_12[6].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[134] = new AMPA_syn_inh(0.530809)")
h("a_pyramidals_48[4].soma a_baskets_12[6].synlist.append(new NetCon(&v(0.103554964), syn_SmallNet_pyr_bask_AMPA_syn_inh[134], 0.0, 0.0, 1.0))")
# Connection 135: 21, seg 0 (0.066363) -> 6, seg 1 (0.828596)
h("a_baskets_12[6].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[135] = new AMPA_syn_inh(0.828596)")
h("a_pyramidals_48[21].soma a_baskets_12[6].synlist.append(new NetCon(&v(0.06636292), syn_SmallNet_pyr_bask_AMPA_syn_inh[135], 0.0, 0.0, 1.0))")
# Connection 136: 44, seg 0 (0.189232) -> 6, seg 1 (0.246049)
h("a_baskets_12[6].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[136] = new AMPA_syn_inh(0.246049)")
h("a_pyramidals_48[44].soma a_baskets_12[6].synlist.append(new NetCon(&v(0.18923211), syn_SmallNet_pyr_bask_AMPA_syn_inh[136], 0.0, 0.0, 1.0))")
# Connection 137: 20, seg 0 (0.031810) -> 6, seg 1 (0.256217)
h("a_baskets_12[6].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[137] = new AMPA_syn_inh(0.256217)")
h("a_pyramidals_48[20].soma a_baskets_12[6].synlist.append(new NetCon(&v(0.031809628), syn_SmallNet_pyr_bask_AMPA_syn_inh[137], 0.0, 0.0, 1.0))")
# Connection 138: 12, seg 0 (0.474969) -> 6, seg 1 (0.145235)
h("a_baskets_12[6].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[138] = new AMPA_syn_inh(0.145235)")
h("a_pyramidals_48[12].soma a_baskets_12[6].synlist.append(new NetCon(&v(0.47496945), syn_SmallNet_pyr_bask_AMPA_syn_inh[138], 0.0, 0.0, 1.0))")
# Connection 139: 30, seg 0 (0.675336) -> 6, seg 1 (0.895062)
h("a_baskets_12[6].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[139] = new AMPA_syn_inh(0.895062)")
h("a_pyramidals_48[30].soma a_baskets_12[6].synlist.append(new NetCon(&v(0.6753356), syn_SmallNet_pyr_bask_AMPA_syn_inh[139], 0.0, 0.0, 1.0))")
# Connection 140: 1, seg 0 (0.067059) -> 6, seg 1 (0.037931)
h("a_baskets_12[6].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[140] = new AMPA_syn_inh(0.037931)")
h("a_pyramidals_48[1].soma a_baskets_12[6].synlist.append(new NetCon(&v(0.0670588), syn_SmallNet_pyr_bask_AMPA_syn_inh[140], 0.0, 0.0, 1.0))")
# Connection 141: 45, seg 0 (0.535194) -> 6, seg 1 (0.743486)
h("a_baskets_12[6].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[141] = new AMPA_syn_inh(0.743486)")
h("a_pyramidals_48[45].soma a_baskets_12[6].synlist.append(new NetCon(&v(0.53519374), syn_SmallNet_pyr_bask_AMPA_syn_inh[141], 0.0, 0.0, 1.0))")
# Connection 142: 34, seg 0 (0.844755) -> 6, seg 1 (0.874799)
h("a_baskets_12[6].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[142] = new AMPA_syn_inh(0.874799)")
h("a_pyramidals_48[34].soma a_baskets_12[6].synlist.append(new NetCon(&v(0.8447551), syn_SmallNet_pyr_bask_AMPA_syn_inh[142], 0.0, 0.0, 1.0))")
# Connection 143: 36, seg 0 (0.423475) -> 6, seg 1 (0.295587)
h("a_baskets_12[6].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[143] = new AMPA_syn_inh(0.295587)")
h("a_pyramidals_48[36].soma a_baskets_12[6].synlist.append(new NetCon(&v(0.42347503), syn_SmallNet_pyr_bask_AMPA_syn_inh[143], 0.0, 0.0, 1.0))")
# Connection 144: 37, seg 0 (0.127464) -> 6, seg 1 (0.389925)
h("a_baskets_12[6].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[144] = new AMPA_syn_inh(0.389925)")
h("a_pyramidals_48[37].soma a_baskets_12[6].synlist.append(new NetCon(&v(0.12746394), syn_SmallNet_pyr_bask_AMPA_syn_inh[144], 0.0, 0.0, 1.0))")
# Connection 145: 5, seg 0 (0.173190) -> 6, seg 1 (0.061333)
h("a_baskets_12[6].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[145] = new AMPA_syn_inh(0.061333)")
h("a_pyramidals_48[5].soma a_baskets_12[6].synlist.append(new NetCon(&v(0.17319047), syn_SmallNet_pyr_bask_AMPA_syn_inh[145], 0.0, 0.0, 1.0))")
# Connection 146: 24, seg 0 (0.042036) -> 6, seg 1 (0.578983)
h("a_baskets_12[6].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[146] = new AMPA_syn_inh(0.578983)")
h("a_pyramidals_48[24].soma a_baskets_12[6].synlist.append(new NetCon(&v(0.04203564), syn_SmallNet_pyr_bask_AMPA_syn_inh[146], 0.0, 0.0, 1.0))")
# Connection 147: 27, seg 0 (0.570306) -> 7, seg 1 (0.274265)
h("a_baskets_12[7].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[147] = new AMPA_syn_inh(0.274265)")
h("a_pyramidals_48[27].soma a_baskets_12[7].synlist.append(new NetCon(&v(0.5703062), syn_SmallNet_pyr_bask_AMPA_syn_inh[147], 0.0, 0.0, 1.0))")
# Connection 148: 26, seg 0 (0.586770) -> 7, seg 1 (0.666708)
h("a_baskets_12[7].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[148] = new AMPA_syn_inh(0.666708)")
h("a_pyramidals_48[26].soma a_baskets_12[7].synlist.append(new NetCon(&v(0.5867704), syn_SmallNet_pyr_bask_AMPA_syn_inh[148], 0.0, 0.0, 1.0))")
# Connection 149: 24, seg 0 (0.168323) -> 7, seg 1 (0.507711)
h("a_baskets_12[7].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[149] = new AMPA_syn_inh(0.507711)")
h("a_pyramidals_48[24].soma a_baskets_12[7].synlist.append(new NetCon(&v(0.16832298), syn_SmallNet_pyr_bask_AMPA_syn_inh[149], 0.0, 0.0, 1.0))")
# Connection 150: 28, seg 0 (0.199727) -> 7, seg 1 (0.929867)
h("a_baskets_12[7].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[150] = new AMPA_syn_inh(0.929867)")
h("a_pyramidals_48[28].soma a_baskets_12[7].synlist.append(new NetCon(&v(0.19972706), syn_SmallNet_pyr_bask_AMPA_syn_inh[150], 0.0, 0.0, 1.0))")
# Connection 151: 37, seg 0 (0.673790) -> 7, seg 1 (0.046219)
h("a_baskets_12[7].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[151] = new AMPA_syn_inh(0.046219)")
h("a_pyramidals_48[37].soma a_baskets_12[7].synlist.append(new NetCon(&v(0.6737904), syn_SmallNet_pyr_bask_AMPA_syn_inh[151], 0.0, 0.0, 1.0))")
# Connection 152: 27, seg 0 (0.183602) -> 7, seg 1 (0.938431)
h("a_baskets_12[7].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[152] = new AMPA_syn_inh(0.938431)")
h("a_pyramidals_48[27].soma a_baskets_12[7].synlist.append(new NetCon(&v(0.18360227), syn_SmallNet_pyr_bask_AMPA_syn_inh[152], 0.0, 0.0, 1.0))")
# Connection 153: 21, seg 0 (0.169258) -> 7, seg 1 (0.511193)
h("a_baskets_12[7].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[153] = new AMPA_syn_inh(0.511193)")
h("a_pyramidals_48[21].soma a_baskets_12[7].synlist.append(new NetCon(&v(0.16925782), syn_SmallNet_pyr_bask_AMPA_syn_inh[153], 0.0, 0.0, 1.0))")
# Connection 154: 8, seg 0 (0.038106) -> 7, seg 1 (0.418610)
h("a_baskets_12[7].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[154] = new AMPA_syn_inh(0.418610)")
h("a_pyramidals_48[8].soma a_baskets_12[7].synlist.append(new NetCon(&v(0.038106084), syn_SmallNet_pyr_bask_AMPA_syn_inh[154], 0.0, 0.0, 1.0))")
# Connection 155: 25, seg 0 (0.109956) -> 7, seg 1 (0.509325)
h("a_baskets_12[7].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[155] = new AMPA_syn_inh(0.509325)")
h("a_pyramidals_48[25].soma a_baskets_12[7].synlist.append(new NetCon(&v(0.109956026), syn_SmallNet_pyr_bask_AMPA_syn_inh[155], 0.0, 0.0, 1.0))")
# Connection 156: 7, seg 0 (0.842967) -> 7, seg 1 (0.149199)
h("a_baskets_12[7].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[156] = new AMPA_syn_inh(0.149199)")
h("a_pyramidals_48[7].soma a_baskets_12[7].synlist.append(new NetCon(&v(0.84296733), syn_SmallNet_pyr_bask_AMPA_syn_inh[156], 0.0, 0.0, 1.0))")
# Connection 157: 20, seg 0 (0.138392) -> 7, seg 1 (0.397049)
h("a_baskets_12[7].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[157] = new AMPA_syn_inh(0.397049)")
h("a_pyramidals_48[20].soma a_baskets_12[7].synlist.append(new NetCon(&v(0.13839215), syn_SmallNet_pyr_bask_AMPA_syn_inh[157], 0.0, 0.0, 1.0))")
# Connection 158: 30, seg 0 (0.372023) -> 7, seg 1 (0.300105)
h("a_baskets_12[7].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[158] = new AMPA_syn_inh(0.300105)")
h("a_pyramidals_48[30].soma a_baskets_12[7].synlist.append(new NetCon(&v(0.37202317), syn_SmallNet_pyr_bask_AMPA_syn_inh[158], 0.0, 0.0, 1.0))")
# Connection 159: 6, seg 0 (0.743386) -> 7, seg 1 (0.062933)
h("a_baskets_12[7].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[159] = new AMPA_syn_inh(0.062933)")
h("a_pyramidals_48[6].soma a_baskets_12[7].synlist.append(new NetCon(&v(0.7433861), syn_SmallNet_pyr_bask_AMPA_syn_inh[159], 0.0, 0.0, 1.0))")
# Connection 160: 18, seg 0 (0.140702) -> 7, seg 1 (0.288958)
h("a_baskets_12[7].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[160] = new AMPA_syn_inh(0.288958)")
h("a_pyramidals_48[18].soma a_baskets_12[7].synlist.append(new NetCon(&v(0.14070177), syn_SmallNet_pyr_bask_AMPA_syn_inh[160], 0.0, 0.0, 1.0))")
# Connection 161: 0, seg 0 (0.563109) -> 7, seg 1 (0.461809)
h("a_baskets_12[7].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[161] = new AMPA_syn_inh(0.461809)")
h("a_pyramidals_48[0].soma a_baskets_12[7].synlist.append(new NetCon(&v(0.563109), syn_SmallNet_pyr_bask_AMPA_syn_inh[161], 0.0, 0.0, 1.0))")
# Connection 162: 21, seg 0 (0.016955) -> 7, seg 1 (0.332127)
h("a_baskets_12[7].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[162] = new AMPA_syn_inh(0.332127)")
h("a_pyramidals_48[21].soma a_baskets_12[7].synlist.append(new NetCon(&v(0.016954899), syn_SmallNet_pyr_bask_AMPA_syn_inh[162], 0.0, 0.0, 1.0))")
# Connection 163: 40, seg 0 (0.486844) -> 7, seg 1 (0.926944)
h("a_baskets_12[7].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[163] = new AMPA_syn_inh(0.926944)")
h("a_pyramidals_48[40].soma a_baskets_12[7].synlist.append(new NetCon(&v(0.4868443), syn_SmallNet_pyr_bask_AMPA_syn_inh[163], 0.0, 0.0, 1.0))")
# Connection 164: 17, seg 0 (0.028926) -> 7, seg 1 (0.587694)
h("a_baskets_12[7].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[164] = new AMPA_syn_inh(0.587694)")
h("a_pyramidals_48[17].soma a_baskets_12[7].synlist.append(new NetCon(&v(0.028926492), syn_SmallNet_pyr_bask_AMPA_syn_inh[164], 0.0, 0.0, 1.0))")
# Connection 165: 40, seg 0 (0.671854) -> 7, seg 1 (0.489127)
h("a_baskets_12[7].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[165] = new AMPA_syn_inh(0.489127)")
h("a_pyramidals_48[40].soma a_baskets_12[7].synlist.append(new NetCon(&v(0.6718543), syn_SmallNet_pyr_bask_AMPA_syn_inh[165], 0.0, 0.0, 1.0))")
# Connection 166: 43, seg 0 (0.060262) -> 7, seg 1 (0.084418)
h("a_baskets_12[7].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[166] = new AMPA_syn_inh(0.084418)")
h("a_pyramidals_48[43].soma a_baskets_12[7].synlist.append(new NetCon(&v(0.06026244), syn_SmallNet_pyr_bask_AMPA_syn_inh[166], 0.0, 0.0, 1.0))")
# Connection 167: 34, seg 0 (0.038113) -> 7, seg 1 (0.828037)
h("a_baskets_12[7].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[167] = new AMPA_syn_inh(0.828037)")
h("a_pyramidals_48[34].soma a_baskets_12[7].synlist.append(new NetCon(&v(0.038113296), syn_SmallNet_pyr_bask_AMPA_syn_inh[167], 0.0, 0.0, 1.0))")
# Connection 168: 24, seg 0 (0.721525) -> 8, seg 1 (0.949831)
h("a_baskets_12[8].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[168] = new AMPA_syn_inh(0.949831)")
h("a_pyramidals_48[24].soma a_baskets_12[8].synlist.append(new NetCon(&v(0.7215248), syn_SmallNet_pyr_bask_AMPA_syn_inh[168], 0.0, 0.0, 1.0))")
# Connection 169: 24, seg 0 (0.245451) -> 8, seg 1 (0.064618)
h("a_baskets_12[8].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[169] = new AMPA_syn_inh(0.064618)")
h("a_pyramidals_48[24].soma a_baskets_12[8].synlist.append(new NetCon(&v(0.2454508), syn_SmallNet_pyr_bask_AMPA_syn_inh[169], 0.0, 0.0, 1.0))")
# Connection 170: 0, seg 0 (0.559576) -> 8, seg 1 (0.770512)
h("a_baskets_12[8].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[170] = new AMPA_syn_inh(0.770512)")
h("a_pyramidals_48[0].soma a_baskets_12[8].synlist.append(new NetCon(&v(0.55957603), syn_SmallNet_pyr_bask_AMPA_syn_inh[170], 0.0, 0.0, 1.0))")
# Connection 171: 20, seg 0 (0.412614) -> 8, seg 1 (0.572197)
h("a_baskets_12[8].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[171] = new AMPA_syn_inh(0.572197)")
h("a_pyramidals_48[20].soma a_baskets_12[8].synlist.append(new NetCon(&v(0.41261417), syn_SmallNet_pyr_bask_AMPA_syn_inh[171], 0.0, 0.0, 1.0))")
# Connection 172: 9, seg 0 (0.821331) -> 8, seg 1 (0.613034)
h("a_baskets_12[8].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[172] = new AMPA_syn_inh(0.613034)")
h("a_pyramidals_48[9].soma a_baskets_12[8].synlist.append(new NetCon(&v(0.8213309), syn_SmallNet_pyr_bask_AMPA_syn_inh[172], 0.0, 0.0, 1.0))")
# Connection 173: 13, seg 0 (0.433416) -> 8, seg 1 (0.894997)
h("a_baskets_12[8].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[173] = new AMPA_syn_inh(0.894997)")
h("a_pyramidals_48[13].soma a_baskets_12[8].synlist.append(new NetCon(&v(0.43341553), syn_SmallNet_pyr_bask_AMPA_syn_inh[173], 0.0, 0.0, 1.0))")
# Connection 174: 4, seg 0 (0.490516) -> 8, seg 1 (0.136201)
h("a_baskets_12[8].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[174] = new AMPA_syn_inh(0.136201)")
h("a_pyramidals_48[4].soma a_baskets_12[8].synlist.append(new NetCon(&v(0.49051565), syn_SmallNet_pyr_bask_AMPA_syn_inh[174], 0.0, 0.0, 1.0))")
# Connection 175: 28, seg 0 (0.225202) -> 8, seg 1 (0.190854)
h("a_baskets_12[8].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[175] = new AMPA_syn_inh(0.190854)")
h("a_pyramidals_48[28].soma a_baskets_12[8].synlist.append(new NetCon(&v(0.2252019), syn_SmallNet_pyr_bask_AMPA_syn_inh[175], 0.0, 0.0, 1.0))")
# Connection 176: 19, seg 0 (0.175265) -> 8, seg 1 (0.330701)
h("a_baskets_12[8].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[176] = new AMPA_syn_inh(0.330701)")
h("a_pyramidals_48[19].soma a_baskets_12[8].synlist.append(new NetCon(&v(0.17526543), syn_SmallNet_pyr_bask_AMPA_syn_inh[176], 0.0, 0.0, 1.0))")
# Connection 177: 14, seg 0 (0.149073) -> 8, seg 1 (0.448793)
h("a_baskets_12[8].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[177] = new AMPA_syn_inh(0.448793)")
h("a_pyramidals_48[14].soma a_baskets_12[8].synlist.append(new NetCon(&v(0.14907312), syn_SmallNet_pyr_bask_AMPA_syn_inh[177], 0.0, 0.0, 1.0))")
# Connection 178: 6, seg 0 (0.848205) -> 8, seg 1 (0.053290)
h("a_baskets_12[8].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[178] = new AMPA_syn_inh(0.053290)")
h("a_pyramidals_48[6].soma a_baskets_12[8].synlist.append(new NetCon(&v(0.84820545), syn_SmallNet_pyr_bask_AMPA_syn_inh[178], 0.0, 0.0, 1.0))")
# Connection 179: 47, seg 0 (0.971702) -> 8, seg 1 (0.540677)
h("a_baskets_12[8].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[179] = new AMPA_syn_inh(0.540677)")
h("a_pyramidals_48[47].soma a_baskets_12[8].synlist.append(new NetCon(&v(0.97170186), syn_SmallNet_pyr_bask_AMPA_syn_inh[179], 0.0, 0.0, 1.0))")
# Connection 180: 13, seg 0 (0.969474) -> 8, seg 1 (0.332109)
h("a_baskets_12[8].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[180] = new AMPA_syn_inh(0.332109)")
h("a_pyramidals_48[13].soma a_baskets_12[8].synlist.append(new NetCon(&v(0.9694742), syn_SmallNet_pyr_bask_AMPA_syn_inh[180], 0.0, 0.0, 1.0))")
# Connection 181: 31, seg 0 (0.419882) -> 8, seg 1 (0.050733)
h("a_baskets_12[8].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[181] = new AMPA_syn_inh(0.050733)")
h("a_pyramidals_48[31].soma a_baskets_12[8].synlist.append(new NetCon(&v(0.41988236), syn_SmallNet_pyr_bask_AMPA_syn_inh[181], 0.0, 0.0, 1.0))")
# Connection 182: 33, seg 0 (0.415350) -> 8, seg 1 (0.479888)
h("a_baskets_12[8].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[182] = new AMPA_syn_inh(0.479888)")
h("a_pyramidals_48[33].soma a_baskets_12[8].synlist.append(new NetCon(&v(0.41534972), syn_SmallNet_pyr_bask_AMPA_syn_inh[182], 0.0, 0.0, 1.0))")
# Connection 183: 19, seg 0 (0.762643) -> 8, seg 1 (0.385109)
h("a_baskets_12[8].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[183] = new AMPA_syn_inh(0.385109)")
h("a_pyramidals_48[19].soma a_baskets_12[8].synlist.append(new NetCon(&v(0.7626434), syn_SmallNet_pyr_bask_AMPA_syn_inh[183], 0.0, 0.0, 1.0))")
# Connection 184: 38, seg 0 (0.794916) -> 8, seg 1 (0.358610)
h("a_baskets_12[8].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[184] = new AMPA_syn_inh(0.358610)")
h("a_pyramidals_48[38].soma a_baskets_12[8].synlist.append(new NetCon(&v(0.79491585), syn_SmallNet_pyr_bask_AMPA_syn_inh[184], 0.0, 0.0, 1.0))")
# Connection 185: 39, seg 0 (0.913962) -> 8, seg 1 (0.564829)
h("a_baskets_12[8].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[185] = new AMPA_syn_inh(0.564829)")
h("a_pyramidals_48[39].soma a_baskets_12[8].synlist.append(new NetCon(&v(0.91396224), syn_SmallNet_pyr_bask_AMPA_syn_inh[185], 0.0, 0.0, 1.0))")
# Connection 186: 44, seg 0 (0.844989) -> 8, seg 1 (0.207660)
h("a_baskets_12[8].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[186] = new AMPA_syn_inh(0.207660)")
h("a_pyramidals_48[44].soma a_baskets_12[8].synlist.append(new NetCon(&v(0.8449894), syn_SmallNet_pyr_bask_AMPA_syn_inh[186], 0.0, 0.0, 1.0))")
# Connection 187: 12, seg 0 (0.245024) -> 8, seg 1 (0.691515)
h("a_baskets_12[8].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[187] = new AMPA_syn_inh(0.691515)")
h("a_pyramidals_48[12].soma a_baskets_12[8].synlist.append(new NetCon(&v(0.24502444), syn_SmallNet_pyr_bask_AMPA_syn_inh[187], 0.0, 0.0, 1.0))")
# Connection 188: 19, seg 0 (0.817960) -> 8, seg 1 (0.268115)
h("a_baskets_12[8].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[188] = new AMPA_syn_inh(0.268115)")
h("a_pyramidals_48[19].soma a_baskets_12[8].synlist.append(new NetCon(&v(0.81795985), syn_SmallNet_pyr_bask_AMPA_syn_inh[188], 0.0, 0.0, 1.0))")
# Connection 189: 26, seg 0 (0.512988) -> 9, seg 1 (0.774585)
h("a_baskets_12[9].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[189] = new AMPA_syn_inh(0.774585)")
h("a_pyramidals_48[26].soma a_baskets_12[9].synlist.append(new NetCon(&v(0.5129879), syn_SmallNet_pyr_bask_AMPA_syn_inh[189], 0.0, 0.0, 1.0))")
# Connection 190: 37, seg 0 (0.416295) -> 9, seg 1 (0.905103)
h("a_baskets_12[9].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[190] = new AMPA_syn_inh(0.905103)")
h("a_pyramidals_48[37].soma a_baskets_12[9].synlist.append(new NetCon(&v(0.41629505), syn_SmallNet_pyr_bask_AMPA_syn_inh[190], 0.0, 0.0, 1.0))")
# Connection 191: 43, seg 0 (0.473926) -> 9, seg 1 (0.665870)
h("a_baskets_12[9].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[191] = new AMPA_syn_inh(0.665870)")
h("a_pyramidals_48[43].soma a_baskets_12[9].synlist.append(new NetCon(&v(0.47392637), syn_SmallNet_pyr_bask_AMPA_syn_inh[191], 0.0, 0.0, 1.0))")
# Connection 192: 28, seg 0 (0.445444) -> 9, seg 1 (0.962162)
h("a_baskets_12[9].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[192] = new AMPA_syn_inh(0.962162)")
h("a_pyramidals_48[28].soma a_baskets_12[9].synlist.append(new NetCon(&v(0.44544417), syn_SmallNet_pyr_bask_AMPA_syn_inh[192], 0.0, 0.0, 1.0))")
# Connection 193: 9, seg 0 (0.627515) -> 9, seg 1 (0.841585)
h("a_baskets_12[9].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[193] = new AMPA_syn_inh(0.841585)")
h("a_pyramidals_48[9].soma a_baskets_12[9].synlist.append(new NetCon(&v(0.627515), syn_SmallNet_pyr_bask_AMPA_syn_inh[193], 0.0, 0.0, 1.0))")
# Connection 194: 3, seg 0 (0.451611) -> 9, seg 1 (0.951518)
h("a_baskets_12[9].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[194] = new AMPA_syn_inh(0.951518)")
h("a_pyramidals_48[3].soma a_baskets_12[9].synlist.append(new NetCon(&v(0.45161062), syn_SmallNet_pyr_bask_AMPA_syn_inh[194], 0.0, 0.0, 1.0))")
# Connection 195: 31, seg 0 (0.659914) -> 9, seg 1 (0.613425)
h("a_baskets_12[9].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[195] = new AMPA_syn_inh(0.613425)")
h("a_pyramidals_48[31].soma a_baskets_12[9].synlist.append(new NetCon(&v(0.6599144), syn_SmallNet_pyr_bask_AMPA_syn_inh[195], 0.0, 0.0, 1.0))")
# Connections 196-251: pyramidal -> basket AMPA synapses (SmallNet).
# The generated, fully unrolled h(...) stanzas are tabulated as
#   (netcon index, post basket cell, post dend location,
#    pre pyramidal cell, pre soma location)
# Location fractions are kept as verbatim strings so that every HOC
# command issued below is byte-identical to the original unrolled code.
_PYR_BASK_AMPA_CONNS = [
    (196, 9, "0.748627", 27, "0.56621087"),
    (197, 9, "0.922070", 13, "0.18481869"),
    (198, 9, "0.240588", 45, "0.82403487"),
    (199, 9, "0.176992", 11, "0.40027392"),
    (200, 9, "0.096151", 26, "0.34018075"),
    (201, 9, "0.317777", 11, "0.20232314"),
    (202, 9, "0.801545", 3, "0.9255983"),
    (203, 9, "0.654422", 47, "0.8165145"),
    (204, 9, "0.060784", 32, "0.77220476"),
    (205, 9, "0.902026", 43, "0.2918455"),
    (206, 9, "0.760250", 16, "0.64190674"),
    (207, 9, "0.645325", 26, "0.06584966"),
    (208, 9, "0.327596", 16, "0.80378526"),
    (209, 9, "0.011426", 9, "0.6307553"),
    (210, 10, "0.847100", 34, "0.32221377"),
    (211, 10, "0.108070", 20, "0.8035423"),
    (212, 10, "0.775910", 38, "0.1557011"),
    (213, 10, "0.341069", 37, "0.054697394"),
    (214, 10, "0.592654", 15, "0.023345113"),
    (215, 10, "0.327088", 1, "0.16346979"),
    (216, 10, "0.071543", 39, "0.60489345"),
    (217, 10, "0.487077", 15, "0.25107235"),
    (218, 10, "0.258986", 44, "0.8177141"),
    (219, 10, "0.275105", 42, "0.74777824"),
    (220, 10, "0.947634", 12, "0.14783102"),
    (221, 10, "0.802072", 34, "0.20069796"),
    (222, 10, "0.892953", 5, "0.112681985"),
    (223, 10, "0.304640", 45, "0.50918007"),
    (224, 10, "0.351108", 19, "0.6300208"),
    (225, 10, "0.870640", 42, "0.27406764"),
    (226, 10, "0.099209", 11, "0.45046455"),
    (227, 10, "0.088998", 12, "0.5418267"),
    (228, 10, "0.408094", 26, "0.8692691"),
    (229, 10, "0.674184", 40, "0.8448175"),
    (230, 10, "0.892711", 38, "0.41126305"),
    (231, 11, "0.748485", 11, "0.923626"),
    (232, 11, "0.461127", 32, "0.20252454"),
    (233, 11, "0.945479", 10, "0.15223426"),
    (234, 11, "0.463695", 0, "0.09077996"),
    (235, 11, "0.307516", 10, "0.3636455"),
    (236, 11, "0.126051", 18, "0.29830897"),
    (237, 11, "0.157572", 17, "0.0020766854"),
    (238, 11, "0.768249", 18, "0.27712512"),
    (239, 11, "0.119692", 27, "0.47646457"),
    (240, 11, "0.138409", 7, "0.22640294"),
    (241, 11, "0.440934", 19, "0.92521685"),
    (242, 11, "0.424474", 6, "0.31854433"),
    (243, 11, "0.929529", 5, "0.7390334"),
    (244, 11, "0.548774", 33, "0.31301737"),
    (245, 11, "0.665498", 23, "0.6605705"),
    (246, 11, "0.562126", 1, "0.7065194"),
    (247, 11, "0.492043", 14, "0.27713716"),
    (248, 11, "0.783982", 25, "0.5544384"),
    (249, 11, "0.195738", 8, "0.44581532"),
    (250, 11, "0.641190", 4, "0.34956664"),
    (251, 11, "0.497331", 14, "0.50471133"),
]
for _idx, _post, _syn_loc, _pre, _v_loc in _PYR_BASK_AMPA_CONNS:
    # Create the AMPA synapse point process on the basket cell dendrite.
    h("a_baskets_12[%d].dend syn_SmallNet_pyr_bask_AMPA_syn_inh[%d] = new AMPA_syn_inh(%s)"
      % (_post, _idx, _syn_loc))
    # Wire the presynaptic pyramidal soma voltage to it via a NetCon
    # (threshold 0.0, delay 0.0, weight 1.0), stored in the target synlist.
    h("a_pyramidals_48[%d].soma a_baskets_12[%d].synlist.append(new NetCon(&v(%s), syn_SmallNet_pyr_bask_AMPA_syn_inh[%d], 0.0, 0.0, 1.0))"
      % (_pre, _post, _v_loc, _idx))
# Background current-clamp inputs 0-13, one per target pyramidal soma.
# Index i in this list is the component id; the value is the target cell.
_BACKGROUND_ICLAMP_TARGETS = [37, 35, 28, 5, 20, 19, 0, 38, 1, 7, 3, 11, 21, 15]
for _i, _cell in enumerate(_BACKGROUND_ICLAMP_TARGETS):
    # Adding input: Component(id=_i type=input)
    h("objectvar BackgroundRandomIClamps_%d" % _i)
    # Place the point process at location 0.5 of the target soma
    # (trailing space preserved from the generated HOC command).
    h("a_pyramidals_48[%d].soma { BackgroundRandomIClamps_%d = new BackgroundRandomIClamps(0.500000) } "
      % (_cell, _i))
# Simulation time base: record t and set integration/sampling parameters.
trec = h.Vector()  # vector that will hold the recorded time points
trec.record(h._ref_t)  # sample NEURON's global time variable t each timestep
h.tstop = 300.0  # total simulation duration (ms)
h.dt = 0.01  # fixed integration timestep (ms)
h.steps_per_ms = 100.0  # sampling rate; consistent with dt = 0.01 ms
# File to save: outputFile16 -- columns pyramidals_48/16..25 apical3 v(0.5).
for _i, _cell in enumerate(range(16, 26)):
    # HOC vector name: base name plus one trailing underscore per extra column.
    _vec = "v_v_outputFile16" + "_" * _i
    h(' objectvar %s ' % _vec)
    h(' { %s = new Vector() } ' % _vec)
    h(' %s.record(&a_pyramidals_48[%d].apical3.v(0.5)) ' % (_vec, _cell))
    # Pre-size to one sample per recorded timestep plus the initial point.
    getattr(h, _vec).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile17 -- columns pyramidals_48/26..35 apical3 v(0.5).
for _i, _cell in enumerate(range(26, 36)):
    # HOC vector name: base name plus one trailing underscore per extra column.
    _vec = "v_v_outputFile17" + "_" * _i
    h(' objectvar %s ' % _vec)
    h(' { %s = new Vector() } ' % _vec)
    h(' %s.record(&a_pyramidals_48[%d].apical3.v(0.5)) ' % (_vec, _cell))
    # Pre-size to one sample per recorded timestep plus the initial point.
    getattr(h, _vec).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile14 -- mixed columns: cells 44-47 record apical2,
# cells 0-5 record apical3, all at v(0.5).
_OUTPUTFILE14_COLS = [
    (44, "apical2"), (45, "apical2"), (46, "apical2"), (47, "apical2"),
    (0, "apical3"), (1, "apical3"), (2, "apical3"), (3, "apical3"),
    (4, "apical3"), (5, "apical3"),
]
for _i, (_cell, _sec) in enumerate(_OUTPUTFILE14_COLS):
    # HOC vector name: base name plus one trailing underscore per extra column.
    _vec = "v_v_outputFile14" + "_" * _i
    h(' objectvar %s ' % _vec)
    h(' { %s = new Vector() } ' % _vec)
    h(' %s.record(&a_pyramidals_48[%d].%s.v(0.5)) ' % (_vec, _cell, _sec))
    # Pre-size to one sample per recorded timestep plus the initial point.
    getattr(h, _vec).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile15 -- columns pyramidals_48/6..15 apical3 v(0.5).
for _i, _cell in enumerate(range(6, 16)):
    # HOC vector name: base name plus one trailing underscore per extra column.
    _vec = "v_v_outputFile15" + "_" * _i
    h(' objectvar %s ' % _vec)
    h(' { %s = new Vector() } ' % _vec)
    h(' %s.record(&a_pyramidals_48[%d].apical3.v(0.5)) ' % (_vec, _cell))
    # Pre-size to one sample per recorded timestep plus the initial point.
    getattr(h, _vec).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile38 -- mixed columns: cells 44-47 record basal1,
# cells 0-5 record basal2, all at v(0.5).
_OUTPUTFILE38_COLS = [
    (44, "basal1"), (45, "basal1"), (46, "basal1"), (47, "basal1"),
    (0, "basal2"), (1, "basal2"), (2, "basal2"), (3, "basal2"),
    (4, "basal2"), (5, "basal2"),
]
for _i, (_cell, _sec) in enumerate(_OUTPUTFILE38_COLS):
    # HOC vector name: base name plus one trailing underscore per extra column.
    _vec = "v_v_outputFile38" + "_" * _i
    h(' objectvar %s ' % _vec)
    h(' { %s = new Vector() } ' % _vec)
    h(' %s.record(&a_pyramidals_48[%d].%s.v(0.5)) ' % (_vec, _cell, _sec))
    # Pre-size to one sample per recorded timestep plus the initial point.
    getattr(h, _vec).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile39 -- columns pyramidals_48/6..15 basal2 v(0.5).
for _i, _cell in enumerate(range(6, 16)):
    # HOC vector name: base name plus one trailing underscore per extra column.
    _vec = "v_v_outputFile39" + "_" * _i
    h(' objectvar %s ' % _vec)
    h(' { %s = new Vector() } ' % _vec)
    h(' %s.record(&a_pyramidals_48[%d].basal2.v(0.5)) ' % (_vec, _cell))
    # Pre-size to one sample per recorded timestep plus the initial point.
    getattr(h, _vec).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile36
# Column: pyramidals_48/24/pyr_4_sym/7/v
h(' objectvar v_v_outputFile36 ')
h(' { v_v_outputFile36 = new Vector() } ')
h(' v_v_outputFile36.record(&a_pyramidals_48[24].basal1.v(0.5)) ')
h.v_v_outputFile36.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/25/pyr_4_sym/7/v
h(' objectvar v_v_outputFile36_ ')
h(' { v_v_outputFile36_ = new Vector() } ')
h(' v_v_outputFile36_.record(&a_pyramidals_48[25].basal1.v(0.5)) ')
h.v_v_outputFile36_.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/26/pyr_4_sym/7/v
h(' objectvar v_v_outputFile36__ ')
h(' { v_v_outputFile36__ = new Vector() } ')
h(' v_v_outputFile36__.record(&a_pyramidals_48[26].basal1.v(0.5)) ')
h.v_v_outputFile36__.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/27/pyr_4_sym/7/v
h(' objectvar v_v_outputFile36___ ')
h(' { v_v_outputFile36___ = new Vector() } ')
h(' v_v_outputFile36___.record(&a_pyramidals_48[27].basal1.v(0.5)) ')
h.v_v_outputFile36___.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/28/pyr_4_sym/7/v
h(' objectvar v_v_outputFile36____ ')
h(' { v_v_outputFile36____ = new Vector() } ')
h(' v_v_outputFile36____.record(&a_pyramidals_48[28].basal1.v(0.5)) ')
h.v_v_outputFile36____.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/29/pyr_4_sym/7/v
h(' objectvar v_v_outputFile36_____ ')
h(' { v_v_outputFile36_____ = new Vector() } ')
h(' v_v_outputFile36_____.record(&a_pyramidals_48[29].basal1.v(0.5)) ')
h.v_v_outputFile36_____.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/30/pyr_4_sym/7/v
h(' objectvar v_v_outputFile36______ ')
h(' { v_v_outputFile36______ = new Vector() } ')
h(' v_v_outputFile36______.record(&a_pyramidals_48[30].basal1.v(0.5)) ')
h.v_v_outputFile36______.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/31/pyr_4_sym/7/v
h(' objectvar v_v_outputFile36_______ ')
h(' { v_v_outputFile36_______ = new Vector() } ')
h(' v_v_outputFile36_______.record(&a_pyramidals_48[31].basal1.v(0.5)) ')
h.v_v_outputFile36_______.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/32/pyr_4_sym/7/v
h(' objectvar v_v_outputFile36________ ')
h(' { v_v_outputFile36________ = new Vector() } ')
h(' v_v_outputFile36________.record(&a_pyramidals_48[32].basal1.v(0.5)) ')
h.v_v_outputFile36________.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/33/pyr_4_sym/7/v
h(' objectvar v_v_outputFile36_________ ')
h(' { v_v_outputFile36_________ = new Vector() } ')
h(' v_v_outputFile36_________.record(&a_pyramidals_48[33].basal1.v(0.5)) ')
h.v_v_outputFile36_________.resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile18
# Column: pyramidals_48/36/pyr_4_sym/3/v
h(' objectvar v_v_outputFile18 ')
h(' { v_v_outputFile18 = new Vector() } ')
h(' v_v_outputFile18.record(&a_pyramidals_48[36].apical3.v(0.5)) ')
h.v_v_outputFile18.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/37/pyr_4_sym/3/v
h(' objectvar v_v_outputFile18_ ')
h(' { v_v_outputFile18_ = new Vector() } ')
h(' v_v_outputFile18_.record(&a_pyramidals_48[37].apical3.v(0.5)) ')
h.v_v_outputFile18_.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/38/pyr_4_sym/3/v
h(' objectvar v_v_outputFile18__ ')
h(' { v_v_outputFile18__ = new Vector() } ')
h(' v_v_outputFile18__.record(&a_pyramidals_48[38].apical3.v(0.5)) ')
h.v_v_outputFile18__.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/39/pyr_4_sym/3/v
h(' objectvar v_v_outputFile18___ ')
h(' { v_v_outputFile18___ = new Vector() } ')
h(' v_v_outputFile18___.record(&a_pyramidals_48[39].apical3.v(0.5)) ')
h.v_v_outputFile18___.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/40/pyr_4_sym/3/v
h(' objectvar v_v_outputFile18____ ')
h(' { v_v_outputFile18____ = new Vector() } ')
h(' v_v_outputFile18____.record(&a_pyramidals_48[40].apical3.v(0.5)) ')
h.v_v_outputFile18____.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/41/pyr_4_sym/3/v
h(' objectvar v_v_outputFile18_____ ')
h(' { v_v_outputFile18_____ = new Vector() } ')
h(' v_v_outputFile18_____.record(&a_pyramidals_48[41].apical3.v(0.5)) ')
h.v_v_outputFile18_____.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/42/pyr_4_sym/3/v
h(' objectvar v_v_outputFile18______ ')
h(' { v_v_outputFile18______ = new Vector() } ')
h(' v_v_outputFile18______.record(&a_pyramidals_48[42].apical3.v(0.5)) ')
h.v_v_outputFile18______.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/43/pyr_4_sym/3/v
h(' objectvar v_v_outputFile18_______ ')
h(' { v_v_outputFile18_______ = new Vector() } ')
h(' v_v_outputFile18_______.record(&a_pyramidals_48[43].apical3.v(0.5)) ')
h.v_v_outputFile18_______.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/44/pyr_4_sym/3/v
h(' objectvar v_v_outputFile18________ ')
h(' { v_v_outputFile18________ = new Vector() } ')
h(' v_v_outputFile18________.record(&a_pyramidals_48[44].apical3.v(0.5)) ')
h.v_v_outputFile18________.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/45/pyr_4_sym/3/v
h(' objectvar v_v_outputFile18_________ ')
h(' { v_v_outputFile18_________ = new Vector() } ')
h(' v_v_outputFile18_________.record(&a_pyramidals_48[45].apical3.v(0.5)) ')
h.v_v_outputFile18_________.resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile37
# Column: pyramidals_48/34/pyr_4_sym/7/v
h(' objectvar v_v_outputFile37 ')
h(' { v_v_outputFile37 = new Vector() } ')
h(' v_v_outputFile37.record(&a_pyramidals_48[34].basal1.v(0.5)) ')
h.v_v_outputFile37.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/35/pyr_4_sym/7/v
h(' objectvar v_v_outputFile37_ ')
h(' { v_v_outputFile37_ = new Vector() } ')
h(' v_v_outputFile37_.record(&a_pyramidals_48[35].basal1.v(0.5)) ')
h.v_v_outputFile37_.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/36/pyr_4_sym/7/v
h(' objectvar v_v_outputFile37__ ')
h(' { v_v_outputFile37__ = new Vector() } ')
h(' v_v_outputFile37__.record(&a_pyramidals_48[36].basal1.v(0.5)) ')
h.v_v_outputFile37__.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/37/pyr_4_sym/7/v
h(' objectvar v_v_outputFile37___ ')
h(' { v_v_outputFile37___ = new Vector() } ')
h(' v_v_outputFile37___.record(&a_pyramidals_48[37].basal1.v(0.5)) ')
h.v_v_outputFile37___.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/38/pyr_4_sym/7/v
h(' objectvar v_v_outputFile37____ ')
h(' { v_v_outputFile37____ = new Vector() } ')
h(' v_v_outputFile37____.record(&a_pyramidals_48[38].basal1.v(0.5)) ')
h.v_v_outputFile37____.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/39/pyr_4_sym/7/v
h(' objectvar v_v_outputFile37_____ ')
h(' { v_v_outputFile37_____ = new Vector() } ')
h(' v_v_outputFile37_____.record(&a_pyramidals_48[39].basal1.v(0.5)) ')
h.v_v_outputFile37_____.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/40/pyr_4_sym/7/v
h(' objectvar v_v_outputFile37______ ')
h(' { v_v_outputFile37______ = new Vector() } ')
h(' v_v_outputFile37______.record(&a_pyramidals_48[40].basal1.v(0.5)) ')
h.v_v_outputFile37______.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/41/pyr_4_sym/7/v
h(' objectvar v_v_outputFile37_______ ')
h(' { v_v_outputFile37_______ = new Vector() } ')
h(' v_v_outputFile37_______.record(&a_pyramidals_48[41].basal1.v(0.5)) ')
h.v_v_outputFile37_______.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/42/pyr_4_sym/7/v
h(' objectvar v_v_outputFile37________ ')
h(' { v_v_outputFile37________ = new Vector() } ')
h(' v_v_outputFile37________.record(&a_pyramidals_48[42].basal1.v(0.5)) ')
h.v_v_outputFile37________.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/43/pyr_4_sym/7/v
h(' objectvar v_v_outputFile37_________ ')
h(' { v_v_outputFile37_________ = new Vector() } ')
h(' v_v_outputFile37_________.record(&a_pyramidals_48[43].basal1.v(0.5)) ')
h.v_v_outputFile37_________.resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile19
# Column: pyramidals_48/46/pyr_4_sym/3/v
h(' objectvar v_v_outputFile19 ')
h(' { v_v_outputFile19 = new Vector() } ')
h(' v_v_outputFile19.record(&a_pyramidals_48[46].apical3.v(0.5)) ')
h.v_v_outputFile19.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/47/pyr_4_sym/3/v
h(' objectvar v_v_outputFile19_ ')
h(' { v_v_outputFile19_ = new Vector() } ')
h(' v_v_outputFile19_.record(&a_pyramidals_48[47].apical3.v(0.5)) ')
h.v_v_outputFile19_.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/0/pyr_4_sym/4/v
h(' objectvar v_v_outputFile19__ ')
h(' { v_v_outputFile19__ = new Vector() } ')
h(' v_v_outputFile19__.record(&a_pyramidals_48[0].apical4.v(0.5)) ')
h.v_v_outputFile19__.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/1/pyr_4_sym/4/v
h(' objectvar v_v_outputFile19___ ')
h(' { v_v_outputFile19___ = new Vector() } ')
h(' v_v_outputFile19___.record(&a_pyramidals_48[1].apical4.v(0.5)) ')
h.v_v_outputFile19___.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/2/pyr_4_sym/4/v
h(' objectvar v_v_outputFile19____ ')
h(' { v_v_outputFile19____ = new Vector() } ')
h(' v_v_outputFile19____.record(&a_pyramidals_48[2].apical4.v(0.5)) ')
h.v_v_outputFile19____.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/3/pyr_4_sym/4/v
h(' objectvar v_v_outputFile19_____ ')
h(' { v_v_outputFile19_____ = new Vector() } ')
h(' v_v_outputFile19_____.record(&a_pyramidals_48[3].apical4.v(0.5)) ')
h.v_v_outputFile19_____.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/4/pyr_4_sym/4/v
h(' objectvar v_v_outputFile19______ ')
h(' { v_v_outputFile19______ = new Vector() } ')
h(' v_v_outputFile19______.record(&a_pyramidals_48[4].apical4.v(0.5)) ')
h.v_v_outputFile19______.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/5/pyr_4_sym/4/v
h(' objectvar v_v_outputFile19_______ ')
h(' { v_v_outputFile19_______ = new Vector() } ')
h(' v_v_outputFile19_______.record(&a_pyramidals_48[5].apical4.v(0.5)) ')
h.v_v_outputFile19_______.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/6/pyr_4_sym/4/v
h(' objectvar v_v_outputFile19________ ')
h(' { v_v_outputFile19________ = new Vector() } ')
h(' v_v_outputFile19________.record(&a_pyramidals_48[6].apical4.v(0.5)) ')
h.v_v_outputFile19________.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/7/pyr_4_sym/4/v
h(' objectvar v_v_outputFile19_________ ')
h(' { v_v_outputFile19_________ = new Vector() } ')
h(' v_v_outputFile19_________.record(&a_pyramidals_48[7].apical4.v(0.5)) ')
h.v_v_outputFile19_________.resize((h.tstop * h.steps_per_ms) + 1)
# File to save: time
# Column: time
h(' objectvar v_time ')
h(' { v_time = new Vector() } ')
h(' v_time.record(&t) ')
h.v_time.resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile0
# Column: pyramidals_48/0/pyr_4_sym/0/v
h(' objectvar v_v_outputFile0 ')
h(' { v_v_outputFile0 = new Vector() } ')
h(' v_v_outputFile0.record(&a_pyramidals_48[0].soma.v(0.5)) ')
h.v_v_outputFile0.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/1/pyr_4_sym/0/v
h(' objectvar v_v_outputFile0_ ')
h(' { v_v_outputFile0_ = new Vector() } ')
h(' v_v_outputFile0_.record(&a_pyramidals_48[1].soma.v(0.5)) ')
h.v_v_outputFile0_.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/2/pyr_4_sym/0/v
h(' objectvar v_v_outputFile0__ ')
h(' { v_v_outputFile0__ = new Vector() } ')
h(' v_v_outputFile0__.record(&a_pyramidals_48[2].soma.v(0.5)) ')
h.v_v_outputFile0__.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/3/pyr_4_sym/0/v
h(' objectvar v_v_outputFile0___ ')
h(' { v_v_outputFile0___ = new Vector() } ')
h(' v_v_outputFile0___.record(&a_pyramidals_48[3].soma.v(0.5)) ')
h.v_v_outputFile0___.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/4/pyr_4_sym/0/v
h(' objectvar v_v_outputFile0____ ')
h(' { v_v_outputFile0____ = new Vector() } ')
h(' v_v_outputFile0____.record(&a_pyramidals_48[4].soma.v(0.5)) ')
h.v_v_outputFile0____.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/5/pyr_4_sym/0/v
h(' objectvar v_v_outputFile0_____ ')
h(' { v_v_outputFile0_____ = new Vector() } ')
h(' v_v_outputFile0_____.record(&a_pyramidals_48[5].soma.v(0.5)) ')
h.v_v_outputFile0_____.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/6/pyr_4_sym/0/v
h(' objectvar v_v_outputFile0______ ')
h(' { v_v_outputFile0______ = new Vector() } ')
h(' v_v_outputFile0______.record(&a_pyramidals_48[6].soma.v(0.5)) ')
h.v_v_outputFile0______.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/7/pyr_4_sym/0/v
h(' objectvar v_v_outputFile0_______ ')
h(' { v_v_outputFile0_______ = new Vector() } ')
h(' v_v_outputFile0_______.record(&a_pyramidals_48[7].soma.v(0.5)) ')
h.v_v_outputFile0_______.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/8/pyr_4_sym/0/v
h(' objectvar v_v_outputFile0________ ')
h(' { v_v_outputFile0________ = new Vector() } ')
h(' v_v_outputFile0________.record(&a_pyramidals_48[8].soma.v(0.5)) ')
h.v_v_outputFile0________.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/9/pyr_4_sym/0/v
h(' objectvar v_v_outputFile0_________ ')
h(' { v_v_outputFile0_________ = new Vector() } ')
h(' v_v_outputFile0_________.record(&a_pyramidals_48[9].soma.v(0.5)) ')
h.v_v_outputFile0_________.resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile1
# Column: pyramidals_48/10/pyr_4_sym/0/v
h(' objectvar v_v_outputFile1 ')
h(' { v_v_outputFile1 = new Vector() } ')
h(' v_v_outputFile1.record(&a_pyramidals_48[10].soma.v(0.5)) ')
h.v_v_outputFile1.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/11/pyr_4_sym/0/v
h(' objectvar v_v_outputFile1_ ')
h(' { v_v_outputFile1_ = new Vector() } ')
h(' v_v_outputFile1_.record(&a_pyramidals_48[11].soma.v(0.5)) ')
h.v_v_outputFile1_.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/12/pyr_4_sym/0/v
h(' objectvar v_v_outputFile1__ ')
h(' { v_v_outputFile1__ = new Vector() } ')
h(' v_v_outputFile1__.record(&a_pyramidals_48[12].soma.v(0.5)) ')
h.v_v_outputFile1__.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/13/pyr_4_sym/0/v
h(' objectvar v_v_outputFile1___ ')
h(' { v_v_outputFile1___ = new Vector() } ')
h(' v_v_outputFile1___.record(&a_pyramidals_48[13].soma.v(0.5)) ')
h.v_v_outputFile1___.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/14/pyr_4_sym/0/v
h(' objectvar v_v_outputFile1____ ')
h(' { v_v_outputFile1____ = new Vector() } ')
h(' v_v_outputFile1____.record(&a_pyramidals_48[14].soma.v(0.5)) ')
h.v_v_outputFile1____.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/15/pyr_4_sym/0/v
h(' objectvar v_v_outputFile1_____ ')
h(' { v_v_outputFile1_____ = new Vector() } ')
h(' v_v_outputFile1_____.record(&a_pyramidals_48[15].soma.v(0.5)) ')
h.v_v_outputFile1_____.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/16/pyr_4_sym/0/v
h(' objectvar v_v_outputFile1______ ')
h(' { v_v_outputFile1______ = new Vector() } ')
h(' v_v_outputFile1______.record(&a_pyramidals_48[16].soma.v(0.5)) ')
h.v_v_outputFile1______.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/17/pyr_4_sym/0/v
h(' objectvar v_v_outputFile1_______ ')
h(' { v_v_outputFile1_______ = new Vector() } ')
h(' v_v_outputFile1_______.record(&a_pyramidals_48[17].soma.v(0.5)) ')
h.v_v_outputFile1_______.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/18/pyr_4_sym/0/v
h(' objectvar v_v_outputFile1________ ')
h(' { v_v_outputFile1________ = new Vector() } ')
h(' v_v_outputFile1________.record(&a_pyramidals_48[18].soma.v(0.5)) ')
h.v_v_outputFile1________.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/19/pyr_4_sym/0/v
h(' objectvar v_v_outputFile1_________ ')
h(' { v_v_outputFile1_________ = new Vector() } ')
h(' v_v_outputFile1_________.record(&a_pyramidals_48[19].soma.v(0.5)) ')
h.v_v_outputFile1_________.resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile2
# Column: pyramidals_48/20/pyr_4_sym/0/v
h(' objectvar v_v_outputFile2 ')
h(' { v_v_outputFile2 = new Vector() } ')
h(' v_v_outputFile2.record(&a_pyramidals_48[20].soma.v(0.5)) ')
h.v_v_outputFile2.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/21/pyr_4_sym/0/v
h(' objectvar v_v_outputFile2_ ')
h(' { v_v_outputFile2_ = new Vector() } ')
h(' v_v_outputFile2_.record(&a_pyramidals_48[21].soma.v(0.5)) ')
h.v_v_outputFile2_.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/22/pyr_4_sym/0/v
h(' objectvar v_v_outputFile2__ ')
h(' { v_v_outputFile2__ = new Vector() } ')
h(' v_v_outputFile2__.record(&a_pyramidals_48[22].soma.v(0.5)) ')
h.v_v_outputFile2__.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/23/pyr_4_sym/0/v
h(' objectvar v_v_outputFile2___ ')
h(' { v_v_outputFile2___ = new Vector() } ')
h(' v_v_outputFile2___.record(&a_pyramidals_48[23].soma.v(0.5)) ')
h.v_v_outputFile2___.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/24/pyr_4_sym/0/v
h(' objectvar v_v_outputFile2____ ')
h(' { v_v_outputFile2____ = new Vector() } ')
h(' v_v_outputFile2____.record(&a_pyramidals_48[24].soma.v(0.5)) ')
h.v_v_outputFile2____.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/25/pyr_4_sym/0/v
h(' objectvar v_v_outputFile2_____ ')
h(' { v_v_outputFile2_____ = new Vector() } ')
h(' v_v_outputFile2_____.record(&a_pyramidals_48[25].soma.v(0.5)) ')
h.v_v_outputFile2_____.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/26/pyr_4_sym/0/v
h(' objectvar v_v_outputFile2______ ')
h(' { v_v_outputFile2______ = new Vector() } ')
h(' v_v_outputFile2______.record(&a_pyramidals_48[26].soma.v(0.5)) ')
h.v_v_outputFile2______.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/27/pyr_4_sym/0/v
h(' objectvar v_v_outputFile2_______ ')
h(' { v_v_outputFile2_______ = new Vector() } ')
h(' v_v_outputFile2_______.record(&a_pyramidals_48[27].soma.v(0.5)) ')
h.v_v_outputFile2_______.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/28/pyr_4_sym/0/v
h(' objectvar v_v_outputFile2________ ')
h(' { v_v_outputFile2________ = new Vector() } ')
h(' v_v_outputFile2________.record(&a_pyramidals_48[28].soma.v(0.5)) ')
h.v_v_outputFile2________.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/29/pyr_4_sym/0/v
h(' objectvar v_v_outputFile2_________ ')
h(' { v_v_outputFile2_________ = new Vector() } ')
h(' v_v_outputFile2_________.record(&a_pyramidals_48[29].soma.v(0.5)) ')
h.v_v_outputFile2_________.resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile3
# Column: pyramidals_48/30/pyr_4_sym/0/v
h(' objectvar v_v_outputFile3 ')
h(' { v_v_outputFile3 = new Vector() } ')
h(' v_v_outputFile3.record(&a_pyramidals_48[30].soma.v(0.5)) ')
h.v_v_outputFile3.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/31/pyr_4_sym/0/v
h(' objectvar v_v_outputFile3_ ')
h(' { v_v_outputFile3_ = new Vector() } ')
h(' v_v_outputFile3_.record(&a_pyramidals_48[31].soma.v(0.5)) ')
h.v_v_outputFile3_.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/32/pyr_4_sym/0/v
h(' objectvar v_v_outputFile3__ ')
h(' { v_v_outputFile3__ = new Vector() } ')
h(' v_v_outputFile3__.record(&a_pyramidals_48[32].soma.v(0.5)) ')
h.v_v_outputFile3__.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/33/pyr_4_sym/0/v
h(' objectvar v_v_outputFile3___ ')
h(' { v_v_outputFile3___ = new Vector() } ')
h(' v_v_outputFile3___.record(&a_pyramidals_48[33].soma.v(0.5)) ')
h.v_v_outputFile3___.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/34/pyr_4_sym/0/v
h(' objectvar v_v_outputFile3____ ')
h(' { v_v_outputFile3____ = new Vector() } ')
h(' v_v_outputFile3____.record(&a_pyramidals_48[34].soma.v(0.5)) ')
h.v_v_outputFile3____.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/35/pyr_4_sym/0/v
h(' objectvar v_v_outputFile3_____ ')
h(' { v_v_outputFile3_____ = new Vector() } ')
h(' v_v_outputFile3_____.record(&a_pyramidals_48[35].soma.v(0.5)) ')
h.v_v_outputFile3_____.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/36/pyr_4_sym/0/v
h(' objectvar v_v_outputFile3______ ')
h(' { v_v_outputFile3______ = new Vector() } ')
h(' v_v_outputFile3______.record(&a_pyramidals_48[36].soma.v(0.5)) ')
h.v_v_outputFile3______.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/37/pyr_4_sym/0/v
h(' objectvar v_v_outputFile3_______ ')
h(' { v_v_outputFile3_______ = new Vector() } ')
h(' v_v_outputFile3_______.record(&a_pyramidals_48[37].soma.v(0.5)) ')
h.v_v_outputFile3_______.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/38/pyr_4_sym/0/v
h(' objectvar v_v_outputFile3________ ')
h(' { v_v_outputFile3________ = new Vector() } ')
h(' v_v_outputFile3________.record(&a_pyramidals_48[38].soma.v(0.5)) ')
h.v_v_outputFile3________.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/39/pyr_4_sym/0/v
h(' objectvar v_v_outputFile3_________ ')
h(' { v_v_outputFile3_________ = new Vector() } ')
h(' v_v_outputFile3_________.record(&a_pyramidals_48[39].soma.v(0.5)) ')
h.v_v_outputFile3_________.resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile4
# Column: pyramidals_48/40/pyr_4_sym/0/v
h(' objectvar v_v_outputFile4 ')
h(' { v_v_outputFile4 = new Vector() } ')
h(' v_v_outputFile4.record(&a_pyramidals_48[40].soma.v(0.5)) ')
h.v_v_outputFile4.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/41/pyr_4_sym/0/v
h(' objectvar v_v_outputFile4_ ')
h(' { v_v_outputFile4_ = new Vector() } ')
h(' v_v_outputFile4_.record(&a_pyramidals_48[41].soma.v(0.5)) ')
h.v_v_outputFile4_.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/42/pyr_4_sym/0/v
h(' objectvar v_v_outputFile4__ ')
h(' { v_v_outputFile4__ = new Vector() } ')
h(' v_v_outputFile4__.record(&a_pyramidals_48[42].soma.v(0.5)) ')
h.v_v_outputFile4__.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/43/pyr_4_sym/0/v
h(' objectvar v_v_outputFile4___ ')
h(' { v_v_outputFile4___ = new Vector() } ')
h(' v_v_outputFile4___.record(&a_pyramidals_48[43].soma.v(0.5)) ')
h.v_v_outputFile4___.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/44/pyr_4_sym/0/v
h(' objectvar v_v_outputFile4____ ')
h(' { v_v_outputFile4____ = new Vector() } ')
h(' v_v_outputFile4____.record(&a_pyramidals_48[44].soma.v(0.5)) ')
h.v_v_outputFile4____.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/45/pyr_4_sym/0/v
h(' objectvar v_v_outputFile4_____ ')
h(' { v_v_outputFile4_____ = new Vector() } ')
h(' v_v_outputFile4_____.record(&a_pyramidals_48[45].soma.v(0.5)) ')
h.v_v_outputFile4_____.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/46/pyr_4_sym/0/v
h(' objectvar v_v_outputFile4______ ')
h(' { v_v_outputFile4______ = new Vector() } ')
h(' v_v_outputFile4______.record(&a_pyramidals_48[46].soma.v(0.5)) ')
h.v_v_outputFile4______.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/47/pyr_4_sym/0/v
h(' objectvar v_v_outputFile4_______ ')
h(' { v_v_outputFile4_______ = new Vector() } ')
h(' v_v_outputFile4_______.record(&a_pyramidals_48[47].soma.v(0.5)) ')
h.v_v_outputFile4_______.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/0/pyr_4_sym/1/v
h(' objectvar v_v_outputFile4________ ')
h(' { v_v_outputFile4________ = new Vector() } ')
h(' v_v_outputFile4________.record(&a_pyramidals_48[0].apical0.v(0.5)) ')
h.v_v_outputFile4________.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/1/pyr_4_sym/1/v
h(' objectvar v_v_outputFile4_________ ')
h(' { v_v_outputFile4_________ = new Vector() } ')
h(' v_v_outputFile4_________.record(&a_pyramidals_48[1].apical0.v(0.5)) ')
h.v_v_outputFile4_________.resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile5
# Column: pyramidals_48/2/pyr_4_sym/1/v
h(' objectvar v_v_outputFile5 ')
h(' { v_v_outputFile5 = new Vector() } ')
h(' v_v_outputFile5.record(&a_pyramidals_48[2].apical0.v(0.5)) ')
h.v_v_outputFile5.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/3/pyr_4_sym/1/v
h(' objectvar v_v_outputFile5_ ')
h(' { v_v_outputFile5_ = new Vector() } ')
h(' v_v_outputFile5_.record(&a_pyramidals_48[3].apical0.v(0.5)) ')
h.v_v_outputFile5_.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/4/pyr_4_sym/1/v
h(' objectvar v_v_outputFile5__ ')
h(' { v_v_outputFile5__ = new Vector() } ')
h(' v_v_outputFile5__.record(&a_pyramidals_48[4].apical0.v(0.5)) ')
h.v_v_outputFile5__.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/5/pyr_4_sym/1/v
h(' objectvar v_v_outputFile5___ ')
h(' { v_v_outputFile5___ = new Vector() } ')
h(' v_v_outputFile5___.record(&a_pyramidals_48[5].apical0.v(0.5)) ')
h.v_v_outputFile5___.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/6/pyr_4_sym/1/v
h(' objectvar v_v_outputFile5____ ')
h(' { v_v_outputFile5____ = new Vector() } ')
h(' v_v_outputFile5____.record(&a_pyramidals_48[6].apical0.v(0.5)) ')
h.v_v_outputFile5____.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/7/pyr_4_sym/1/v
h(' objectvar v_v_outputFile5_____ ')
h(' { v_v_outputFile5_____ = new Vector() } ')
h(' v_v_outputFile5_____.record(&a_pyramidals_48[7].apical0.v(0.5)) ')
h.v_v_outputFile5_____.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/8/pyr_4_sym/1/v
h(' objectvar v_v_outputFile5______ ')
h(' { v_v_outputFile5______ = new Vector() } ')
h(' v_v_outputFile5______.record(&a_pyramidals_48[8].apical0.v(0.5)) ')
h.v_v_outputFile5______.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/9/pyr_4_sym/1/v
h(' objectvar v_v_outputFile5_______ ')
h(' { v_v_outputFile5_______ = new Vector() } ')
h(' v_v_outputFile5_______.record(&a_pyramidals_48[9].apical0.v(0.5)) ')
h.v_v_outputFile5_______.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/10/pyr_4_sym/1/v
h(' objectvar v_v_outputFile5________ ')
h(' { v_v_outputFile5________ = new Vector() } ')
h(' v_v_outputFile5________.record(&a_pyramidals_48[10].apical0.v(0.5)) ')
h.v_v_outputFile5________.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/11/pyr_4_sym/1/v
h(' objectvar v_v_outputFile5_________ ')
h(' { v_v_outputFile5_________ = new Vector() } ')
h(' v_v_outputFile5_________.record(&a_pyramidals_48[11].apical0.v(0.5)) ')
h.v_v_outputFile5_________.resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile20
# Column: pyramidals_48/8/pyr_4_sym/4/v
h(' objectvar v_v_outputFile20 ')
h(' { v_v_outputFile20 = new Vector() } ')
h(' v_v_outputFile20.record(&a_pyramidals_48[8].apical4.v(0.5)) ')
h.v_v_outputFile20.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/9/pyr_4_sym/4/v
h(' objectvar v_v_outputFile20_ ')
h(' { v_v_outputFile20_ = new Vector() } ')
h(' v_v_outputFile20_.record(&a_pyramidals_48[9].apical4.v(0.5)) ')
h.v_v_outputFile20_.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/10/pyr_4_sym/4/v
h(' objectvar v_v_outputFile20__ ')
h(' { v_v_outputFile20__ = new Vector() } ')
h(' v_v_outputFile20__.record(&a_pyramidals_48[10].apical4.v(0.5)) ')
h.v_v_outputFile20__.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/11/pyr_4_sym/4/v
h(' objectvar v_v_outputFile20___ ')
h(' { v_v_outputFile20___ = new Vector() } ')
h(' v_v_outputFile20___.record(&a_pyramidals_48[11].apical4.v(0.5)) ')
h.v_v_outputFile20___.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/12/pyr_4_sym/4/v
h(' objectvar v_v_outputFile20____ ')
h(' { v_v_outputFile20____ = new Vector() } ')
h(' v_v_outputFile20____.record(&a_pyramidals_48[12].apical4.v(0.5)) ')
h.v_v_outputFile20____.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/13/pyr_4_sym/4/v
h(' objectvar v_v_outputFile20_____ ')
h(' { v_v_outputFile20_____ = new Vector() } ')
h(' v_v_outputFile20_____.record(&a_pyramidals_48[13].apical4.v(0.5)) ')
h.v_v_outputFile20_____.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/14/pyr_4_sym/4/v
h(' objectvar v_v_outputFile20______ ')
h(' { v_v_outputFile20______ = new Vector() } ')
h(' v_v_outputFile20______.record(&a_pyramidals_48[14].apical4.v(0.5)) ')
h.v_v_outputFile20______.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/15/pyr_4_sym/4/v
h(' objectvar v_v_outputFile20_______ ')
h(' { v_v_outputFile20_______ = new Vector() } ')
h(' v_v_outputFile20_______.record(&a_pyramidals_48[15].apical4.v(0.5)) ')
h.v_v_outputFile20_______.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/16/pyr_4_sym/4/v
h(' objectvar v_v_outputFile20________ ')
h(' { v_v_outputFile20________ = new Vector() } ')
h(' v_v_outputFile20________.record(&a_pyramidals_48[16].apical4.v(0.5)) ')
h.v_v_outputFile20________.resize((h.tstop * h.steps_per_ms) + 1)
# Column: pyramidals_48/17/pyr_4_sym/4/v
h(' objectvar v_v_outputFile20_________ ')
h(' { v_v_outputFile20_________ = new Vector() } ')
h(' v_v_outputFile20_________.record(&a_pyramidals_48[17].apical4.v(0.5)) ')
h.v_v_outputFile20_________.resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile45
# Column: baskets_12/6/bask/1/v
h(' objectvar v_v_outputFile45 ')
h(' { v_v_outputFile45 = new Vector() } ')
h(' v_v_outputFile45.record(&a_baskets_12[6].dend.v(0.5)) ')
h.v_v_outputFile45.resize((h.tstop * h.steps_per_ms) + 1)
# Column: baskets_12/7/bask/1/v
h(' objectvar v_v_outputFile45_ ')
h(' { v_v_outputFile45_ = new Vector() } ')
h(' v_v_outputFile45_.record(&a_baskets_12[7].dend.v(0.5)) ')
h.v_v_outputFile45_.resize((h.tstop * h.steps_per_ms) + 1)
# Column: baskets_12/8/bask/1/v
h(' objectvar v_v_outputFile45__ ')
h(' { v_v_outputFile45__ = new Vector() } ')
h(' v_v_outputFile45__.record(&a_baskets_12[8].dend.v(0.5)) ')
h.v_v_outputFile45__.resize((h.tstop * h.steps_per_ms) + 1)
# Column: baskets_12/9/bask/1/v
h(' objectvar v_v_outputFile45___ ')
h(' { v_v_outputFile45___ = new Vector() } ')
h(' v_v_outputFile45___.record(&a_baskets_12[9].dend.v(0.5)) ')
h.v_v_outputFile45___.resize((h.tstop * h.steps_per_ms) + 1)
# Column: baskets_12/10/bask/1/v
h(' objectvar v_v_outputFile45____ ')
h(' { v_v_outputFile45____ = new Vector() } ')
h(' v_v_outputFile45____.record(&a_baskets_12[10].dend.v(0.5)) ')
h.v_v_outputFile45____.resize((h.tstop * h.steps_per_ms) + 1)
# Column: baskets_12/11/bask/1/v
h(' objectvar v_v_outputFile45_____ ')
h(' { v_v_outputFile45_____ = new Vector() } ')
h(' v_v_outputFile45_____.record(&a_baskets_12[11].dend.v(0.5)) ')
h.v_v_outputFile45_____.resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile6
# Columns: pyramidals_48/12..21/pyr_4_sym/1/v — record apical0 v(0.5) of
# pyramidals_48[12..21]. One trailing underscore is appended to the hoc
# vector name per successive column, matching the generated naming scheme.
for _i, _cell in enumerate(range(12, 22)):
    _name = 'v_v_outputFile6' + '_' * _i
    h(' objectvar %s ' % _name)
    h(' { %s = new Vector() } ' % _name)
    h(' %s.record(&a_pyramidals_48[%i].apical0.v(0.5)) ' % (_name, _cell))
    getattr(h, _name).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile44
# Columns: baskets_12/8..11/bask/0/v (soma), then baskets_12/0..5/bask/1/v
# (dend). The hoc vector name gains one trailing underscore per column.
_targets = [(_c, 'soma') for _c in range(8, 12)] + [(_c, 'dend') for _c in range(6)]
for _i, (_cell, _sec) in enumerate(_targets):
    _name = 'v_v_outputFile44' + '_' * _i
    h(' objectvar %s ' % _name)
    h(' { %s = new Vector() } ' % _name)
    h(' %s.record(&a_baskets_12[%i].%s.v(0.5)) ' % (_name, _cell, _sec))
    getattr(h, _name).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile7
# Columns: pyramidals_48/22..31/pyr_4_sym/1/v — record apical0 v(0.5) of
# pyramidals_48[22..31]; vector names gain one underscore per column.
for _i, _cell in enumerate(range(22, 32)):
    _name = 'v_v_outputFile7' + '_' * _i
    h(' objectvar %s ' % _name)
    h(' { %s = new Vector() } ' % _name)
    h(' %s.record(&a_pyramidals_48[%i].apical0.v(0.5)) ' % (_name, _cell))
    getattr(h, _name).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile43
# Columns: pyramidals_48/46..47/pyr_4_sym/8/v (basal2), then
# baskets_12/0..7/bask/0/v (soma). Vector names gain one underscore per column.
_targets = [('a_pyramidals_48', _c, 'basal2') for _c in (46, 47)]
_targets += [('a_baskets_12', _c, 'soma') for _c in range(8)]
for _i, (_pop, _cell, _sec) in enumerate(_targets):
    _name = 'v_v_outputFile43' + '_' * _i
    h(' objectvar %s ' % _name)
    h(' { %s = new Vector() } ' % _name)
    h(' %s.record(&%s[%i].%s.v(0.5)) ' % (_name, _pop, _cell, _sec))
    getattr(h, _name).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile8
# Columns: pyramidals_48/32..41/pyr_4_sym/1/v — record apical0 v(0.5) of
# pyramidals_48[32..41]; vector names gain one underscore per column.
for _i, _cell in enumerate(range(32, 42)):
    _name = 'v_v_outputFile8' + '_' * _i
    h(' objectvar %s ' % _name)
    h(' { %s = new Vector() } ' % _name)
    h(' %s.record(&a_pyramidals_48[%i].apical0.v(0.5)) ' % (_name, _cell))
    getattr(h, _name).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile42
# Columns: pyramidals_48/36..45/pyr_4_sym/8/v — record basal2 v(0.5) of
# pyramidals_48[36..45]; vector names gain one underscore per column.
for _i, _cell in enumerate(range(36, 46)):
    _name = 'v_v_outputFile42' + '_' * _i
    h(' objectvar %s ' % _name)
    h(' { %s = new Vector() } ' % _name)
    h(' %s.record(&a_pyramidals_48[%i].basal2.v(0.5)) ' % (_name, _cell))
    getattr(h, _name).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile24
# Columns: pyramidals_48/0..9/pyr_4_sym/5/v — record apical1 v(0.5) of
# pyramidals_48[0..9]; vector names gain one underscore per column.
for _i in range(10):
    _name = 'v_v_outputFile24' + '_' * _i
    h(' objectvar %s ' % _name)
    h(' { %s = new Vector() } ' % _name)
    h(' %s.record(&a_pyramidals_48[%i].apical1.v(0.5)) ' % (_name, _i))
    getattr(h, _name).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile9
# Columns: pyramidals_48/42..47/pyr_4_sym/1/v (apical0), then
# pyramidals_48/0..3/pyr_4_sym/2/v (apical2). Vector names gain one
# trailing underscore per successive column.
_targets = [(_c, 'apical0') for _c in range(42, 48)]
_targets += [(_c, 'apical2') for _c in range(4)]
for _i, (_cell, _sec) in enumerate(_targets):
    _name = 'v_v_outputFile9' + '_' * _i
    h(' objectvar %s ' % _name)
    h(' { %s = new Vector() } ' % _name)
    h(' %s.record(&a_pyramidals_48[%i].%s.v(0.5)) ' % (_name, _cell, _sec))
    getattr(h, _name).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile41
# Columns: pyramidals_48/26..35/pyr_4_sym/8/v — record basal2 v(0.5) of
# pyramidals_48[26..35]; vector names gain one underscore per column.
for _i, _cell in enumerate(range(26, 36)):
    _name = 'v_v_outputFile41' + '_' * _i
    h(' objectvar %s ' % _name)
    h(' { %s = new Vector() } ' % _name)
    h(' %s.record(&a_pyramidals_48[%i].basal2.v(0.5)) ' % (_name, _cell))
    getattr(h, _name).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile23
# Columns: pyramidals_48/38..47/pyr_4_sym/4/v — record apical4 v(0.5) of
# pyramidals_48[38..47]; vector names gain one underscore per column.
for _i, _cell in enumerate(range(38, 48)):
    _name = 'v_v_outputFile23' + '_' * _i
    h(' objectvar %s ' % _name)
    h(' { %s = new Vector() } ' % _name)
    h(' %s.record(&a_pyramidals_48[%i].apical4.v(0.5)) ' % (_name, _cell))
    getattr(h, _name).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile40
# Columns: pyramidals_48/16..25/pyr_4_sym/8/v — record basal2 v(0.5) of
# pyramidals_48[16..25]; vector names gain one underscore per column.
for _i, _cell in enumerate(range(16, 26)):
    _name = 'v_v_outputFile40' + '_' * _i
    h(' objectvar %s ' % _name)
    h(' { %s = new Vector() } ' % _name)
    h(' %s.record(&a_pyramidals_48[%i].basal2.v(0.5)) ' % (_name, _cell))
    getattr(h, _name).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile22
# Columns: pyramidals_48/28..37/pyr_4_sym/4/v — record apical4 v(0.5) of
# pyramidals_48[28..37]; vector names gain one underscore per column.
for _i, _cell in enumerate(range(28, 38)):
    _name = 'v_v_outputFile22' + '_' * _i
    h(' objectvar %s ' % _name)
    h(' { %s = new Vector() } ' % _name)
    h(' %s.record(&a_pyramidals_48[%i].apical4.v(0.5)) ' % (_name, _cell))
    getattr(h, _name).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile21
# Columns: pyramidals_48/18..27/pyr_4_sym/4/v — record apical4 v(0.5) of
# pyramidals_48[18..27]; vector names gain one underscore per column.
for _i, _cell in enumerate(range(18, 28)):
    _name = 'v_v_outputFile21' + '_' * _i
    h(' objectvar %s ' % _name)
    h(' { %s = new Vector() } ' % _name)
    h(' %s.record(&a_pyramidals_48[%i].apical4.v(0.5)) ' % (_name, _cell))
    getattr(h, _name).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile25
# Columns: pyramidals_48/<cell>/pyr_4_sym/5/v for cells 10..19 (segment-group
# 5 corresponds to section apical1 here). One hoc Vector per column; names
# carry one extra trailing underscore per successive column.
for _col, _cell in enumerate(range(10, 20)):
    _vec = 'v_v_outputFile25' + '_' * _col
    # Declare the hoc Vector and record membrane potential at the midpoint
    # (0.5) of this cell's apical1 section.
    h(' objectvar %s ' % _vec)
    h(' { %s = new Vector() } ' % _vec)
    h(' %s.record(&a_pyramidals_48[%i].apical1.v(0.5)) ' % (_vec, _cell))
    # Pre-size to the expected sample count: tstop * steps_per_ms + 1.
    getattr(h, _vec).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile12
# Columns: pyramidals_48/<cell>/pyr_4_sym/2/v for cells 24..33 (segment-group
# 2 corresponds to section apical2 here). One hoc Vector per column; names
# carry one extra trailing underscore per successive column.
for _col, _cell in enumerate(range(24, 34)):
    _vec = 'v_v_outputFile12' + '_' * _col
    # Declare the hoc Vector and record membrane potential at the midpoint
    # (0.5) of this cell's apical2 section.
    h(' objectvar %s ' % _vec)
    h(' { %s = new Vector() } ' % _vec)
    h(' %s.record(&a_pyramidals_48[%i].apical2.v(0.5)) ' % (_vec, _cell))
    # Pre-size to the expected sample count: tstop * steps_per_ms + 1.
    getattr(h, _vec).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile26
# Columns: pyramidals_48/<cell>/pyr_4_sym/5/v for cells 20..29 (segment-group
# 5 corresponds to section apical1 here). One hoc Vector per column; names
# carry one extra trailing underscore per successive column.
for _col, _cell in enumerate(range(20, 30)):
    _vec = 'v_v_outputFile26' + '_' * _col
    # Declare the hoc Vector and record membrane potential at the midpoint
    # (0.5) of this cell's apical1 section.
    h(' objectvar %s ' % _vec)
    h(' { %s = new Vector() } ' % _vec)
    h(' %s.record(&a_pyramidals_48[%i].apical1.v(0.5)) ' % (_vec, _cell))
    # Pre-size to the expected sample count: tstop * steps_per_ms + 1.
    getattr(h, _vec).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile13
# Columns: pyramidals_48/<cell>/pyr_4_sym/2/v for cells 34..43 (segment-group
# 2 corresponds to section apical2 here). One hoc Vector per column; names
# carry one extra trailing underscore per successive column.
for _col, _cell in enumerate(range(34, 44)):
    _vec = 'v_v_outputFile13' + '_' * _col
    # Declare the hoc Vector and record membrane potential at the midpoint
    # (0.5) of this cell's apical2 section.
    h(' objectvar %s ' % _vec)
    h(' { %s = new Vector() } ' % _vec)
    h(' %s.record(&a_pyramidals_48[%i].apical2.v(0.5)) ' % (_vec, _cell))
    # Pre-size to the expected sample count: tstop * steps_per_ms + 1.
    getattr(h, _vec).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile27
# Columns: pyramidals_48/<cell>/pyr_4_sym/5/v for cells 30..39 (segment-group
# 5 corresponds to section apical1 here). One hoc Vector per column; names
# carry one extra trailing underscore per successive column.
for _col, _cell in enumerate(range(30, 40)):
    _vec = 'v_v_outputFile27' + '_' * _col
    # Declare the hoc Vector and record membrane potential at the midpoint
    # (0.5) of this cell's apical1 section.
    h(' objectvar %s ' % _vec)
    h(' { %s = new Vector() } ' % _vec)
    h(' %s.record(&a_pyramidals_48[%i].apical1.v(0.5)) ' % (_vec, _cell))
    # Pre-size to the expected sample count: tstop * steps_per_ms + 1.
    getattr(h, _vec).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile10
# Columns: pyramidals_48/<cell>/pyr_4_sym/2/v for cells 4..13 (segment-group
# 2 corresponds to section apical2 here). One hoc Vector per column; names
# carry one extra trailing underscore per successive column.
for _col, _cell in enumerate(range(4, 14)):
    _vec = 'v_v_outputFile10' + '_' * _col
    # Declare the hoc Vector and record membrane potential at the midpoint
    # (0.5) of this cell's apical2 section.
    h(' objectvar %s ' % _vec)
    h(' { %s = new Vector() } ' % _vec)
    h(' %s.record(&a_pyramidals_48[%i].apical2.v(0.5)) ' % (_vec, _cell))
    # Pre-size to the expected sample count: tstop * steps_per_ms + 1.
    getattr(h, _vec).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile28
# Columns: cells 40..47 at apical1 (pyr_4_sym/5/v), then cells 0..1 at
# basal0 (pyr_4_sym/6/v). The trailing-underscore suffix keeps growing
# across both runs, matching the generator's per-column naming.
_targets = [(_c, 'apical1') for _c in range(40, 48)] + \
           [(_c, 'basal0') for _c in range(0, 2)]
for _col, (_cell, _sec) in enumerate(_targets):
    _vec = 'v_v_outputFile28' + '_' * _col
    # Declare the hoc Vector and record membrane potential at the midpoint
    # (0.5) of the target section of this cell.
    h(' objectvar %s ' % _vec)
    h(' { %s = new Vector() } ' % _vec)
    h(' %s.record(&a_pyramidals_48[%i].%s.v(0.5)) ' % (_vec, _cell, _sec))
    # Pre-size to the expected sample count: tstop * steps_per_ms + 1.
    getattr(h, _vec).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile11
# Columns: pyramidals_48/<cell>/pyr_4_sym/2/v for cells 14..23 (segment-group
# 2 corresponds to section apical2 here). One hoc Vector per column; names
# carry one extra trailing underscore per successive column.
for _col, _cell in enumerate(range(14, 24)):
    _vec = 'v_v_outputFile11' + '_' * _col
    # Declare the hoc Vector and record membrane potential at the midpoint
    # (0.5) of this cell's apical2 section.
    h(' objectvar %s ' % _vec)
    h(' { %s = new Vector() } ' % _vec)
    h(' %s.record(&a_pyramidals_48[%i].apical2.v(0.5)) ' % (_vec, _cell))
    # Pre-size to the expected sample count: tstop * steps_per_ms + 1.
    getattr(h, _vec).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile29
# Columns: pyramidals_48/<cell>/pyr_4_sym/6/v for cells 2..11 (segment-group
# 6 corresponds to section basal0 here). One hoc Vector per column; names
# carry one extra trailing underscore per successive column.
for _col, _cell in enumerate(range(2, 12)):
    _vec = 'v_v_outputFile29' + '_' * _col
    # Declare the hoc Vector and record membrane potential at the midpoint
    # (0.5) of this cell's basal0 section.
    h(' objectvar %s ' % _vec)
    h(' { %s = new Vector() } ' % _vec)
    h(' %s.record(&a_pyramidals_48[%i].basal0.v(0.5)) ' % (_vec, _cell))
    # Pre-size to the expected sample count: tstop * steps_per_ms + 1.
    getattr(h, _vec).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile31
# Columns: pyramidals_48/<cell>/pyr_4_sym/6/v for cells 22..31 (segment-group
# 6 corresponds to section basal0 here). One hoc Vector per column; names
# carry one extra trailing underscore per successive column.
for _col, _cell in enumerate(range(22, 32)):
    _vec = 'v_v_outputFile31' + '_' * _col
    # Declare the hoc Vector and record membrane potential at the midpoint
    # (0.5) of this cell's basal0 section.
    h(' objectvar %s ' % _vec)
    h(' { %s = new Vector() } ' % _vec)
    h(' %s.record(&a_pyramidals_48[%i].basal0.v(0.5)) ' % (_vec, _cell))
    # Pre-size to the expected sample count: tstop * steps_per_ms + 1.
    getattr(h, _vec).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile30
# Columns: pyramidals_48/<cell>/pyr_4_sym/6/v for cells 12..21 (segment-group
# 6 corresponds to section basal0 here). One hoc Vector per column; names
# carry one extra trailing underscore per successive column.
for _col, _cell in enumerate(range(12, 22)):
    _vec = 'v_v_outputFile30' + '_' * _col
    # Declare the hoc Vector and record membrane potential at the midpoint
    # (0.5) of this cell's basal0 section.
    h(' objectvar %s ' % _vec)
    h(' { %s = new Vector() } ' % _vec)
    h(' %s.record(&a_pyramidals_48[%i].basal0.v(0.5)) ' % (_vec, _cell))
    # Pre-size to the expected sample count: tstop * steps_per_ms + 1.
    getattr(h, _vec).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile33
# Columns: cells 42..47 at basal0 (pyr_4_sym/6/v), then cells 0..3 at
# basal1 (pyr_4_sym/7/v). The trailing-underscore suffix keeps growing
# across both runs, matching the generator's per-column naming.
_targets = [(_c, 'basal0') for _c in range(42, 48)] + \
           [(_c, 'basal1') for _c in range(0, 4)]
for _col, (_cell, _sec) in enumerate(_targets):
    _vec = 'v_v_outputFile33' + '_' * _col
    # Declare the hoc Vector and record membrane potential at the midpoint
    # (0.5) of the target section of this cell.
    h(' objectvar %s ' % _vec)
    h(' { %s = new Vector() } ' % _vec)
    h(' %s.record(&a_pyramidals_48[%i].%s.v(0.5)) ' % (_vec, _cell, _sec))
    # Pre-size to the expected sample count: tstop * steps_per_ms + 1.
    getattr(h, _vec).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile32
# Record v(0.5) on basal0 of cells 32-41 of a_pyramidals_48 into hoc Vectors
# named v_v_outputFile32, v_v_outputFile32_, ... (one extra trailing
# underscore per column, matching this script's naming scheme).
for _n, _cell in enumerate(range(32, 42)):
    _vec = 'v_v_outputFile32' + '_' * _n
    h(' objectvar %s ' % _vec)
    h(' { %s = new Vector() } ' % _vec)
    h(' %s.record(&a_pyramidals_48[%i].basal0.v(0.5)) ' % (_vec, _cell))
    # Pre-size the Vector to the expected number of samples.
    getattr(h, _vec).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile35
# Record v(0.5) on basal1 of cells 14-23 of a_pyramidals_48 into hoc Vectors
# named v_v_outputFile35, v_v_outputFile35_, ... (one extra trailing
# underscore per column, matching this script's naming scheme).
for _n, _cell in enumerate(range(14, 24)):
    _vec = 'v_v_outputFile35' + '_' * _n
    h(' objectvar %s ' % _vec)
    h(' { %s = new Vector() } ' % _vec)
    h(' %s.record(&a_pyramidals_48[%i].basal1.v(0.5)) ' % (_vec, _cell))
    # Pre-size the Vector to the expected number of samples.
    getattr(h, _vec).resize((h.tstop * h.steps_per_ms) + 1)
# File to save: outputFile34
# Record v(0.5) on basal1 of cells 4-13 of a_pyramidals_48 into hoc Vectors
# named v_v_outputFile34, v_v_outputFile34_, ... (one extra trailing
# underscore per column, matching this script's naming scheme).
for _n, _cell in enumerate(range(4, 14)):
    _vec = 'v_v_outputFile34' + '_' * _n
    h(' objectvar %s ' % _vec)
    h(' { %s = new Vector() } ' % _vec)
    h(' %s.record(&a_pyramidals_48[%i].basal1.v(0.5)) ' % (_vec, _cell))
    # Pre-size the Vector to the expected number of samples.
    getattr(h, _vec).resize((h.tstop * h.steps_per_ms) + 1)
# Run the simulation and report wall-clock duration.
sim_start = time.time()
print("Running a simulation of %sms (dt = %sms)" % (h.tstop, h.dt))
h.run()  # NEURON standard run system: integrate until h.tstop
sim_end = time.time()
sim_time = sim_end - sim_start
print("Finished simulation in %f seconds (%f mins), saving results..."%(sim_time, sim_time/60.0))
# File to save: time
# Convert the recorded hoc time Vector from ms to s (SI units) once, up
# front; py_v_time is reused by every per-file save loop below.
py_v_time = [ t/1000 for t in h.v_time.to_python() ] # Convert to Python list for speed...
f_time_f2 = open('time.dat', 'w')
for i in range(int(h.tstop * h.steps_per_ms) + 1):
    # Bug fix: the row terminator had been swallowed into the trailing
    # comment, so all samples were written on one unbroken line. Write one
    # sample per line, as every other save loop in this script does.
    f_time_f2.write('%f' % py_v_time[i] + '\n')  # Save in SI units...
f_time_f2.close()
print("Saved data to: time.dat")
# File to save: outputFile16
# Pull each recorded hoc Vector into a plain Python list, dividing by 1000
# (the generated comments note dim: voltage, saved in SI units).
_sfx = ('', '_', '__', '___', '____', '_____', '______', '_______',
        '________', '_________')
_data16 = [[float(x / 1000.0) for x in getattr(h, 'v_v_outputFile16' + _s).to_python()]
           for _s in _sfx]
# Keep the individual per-column names this generated script exposes.
(py_v_v_outputFile16, py_v_v_outputFile16_, py_v_v_outputFile16__,
 py_v_v_outputFile16___, py_v_v_outputFile16____, py_v_v_outputFile16_____,
 py_v_v_outputFile16______, py_v_v_outputFile16_______,
 py_v_v_outputFile16________, py_v_v_outputFile16_________) = _data16
# One tab-separated row per sample: time column followed by the ten columns.
f_outputFile16_f2 = open('results/results16.dat', 'w')
for i in range(int(h.tstop * h.steps_per_ms) + 1):
    f_outputFile16_f2.write('%e\t' % py_v_time[i]
                            + ''.join('%e\t' % _col[i] for _col in _data16)
                            + '\n')
f_outputFile16_f2.close()
print("Saved data to: results/results16.dat")
# File to save: outputFile17
# Pull each recorded hoc Vector into a plain Python list, dividing by 1000
# (the generated comments note dim: voltage, saved in SI units).
_sfx = ('', '_', '__', '___', '____', '_____', '______', '_______',
        '________', '_________')
_data17 = [[float(x / 1000.0) for x in getattr(h, 'v_v_outputFile17' + _s).to_python()]
           for _s in _sfx]
# Keep the individual per-column names this generated script exposes.
(py_v_v_outputFile17, py_v_v_outputFile17_, py_v_v_outputFile17__,
 py_v_v_outputFile17___, py_v_v_outputFile17____, py_v_v_outputFile17_____,
 py_v_v_outputFile17______, py_v_v_outputFile17_______,
 py_v_v_outputFile17________, py_v_v_outputFile17_________) = _data17
# One tab-separated row per sample: time column followed by the ten columns.
f_outputFile17_f2 = open('results/results17.dat', 'w')
for i in range(int(h.tstop * h.steps_per_ms) + 1):
    f_outputFile17_f2.write('%e\t' % py_v_time[i]
                            + ''.join('%e\t' % _col[i] for _col in _data17)
                            + '\n')
f_outputFile17_f2.close()
print("Saved data to: results/results17.dat")
# File to save: outputFile14
# Pull each recorded hoc Vector into a plain Python list, dividing by 1000
# (the generated comments note dim: voltage, saved in SI units).
_sfx = ('', '_', '__', '___', '____', '_____', '______', '_______',
        '________', '_________')
_data14 = [[float(x / 1000.0) for x in getattr(h, 'v_v_outputFile14' + _s).to_python()]
           for _s in _sfx]
# Keep the individual per-column names this generated script exposes.
(py_v_v_outputFile14, py_v_v_outputFile14_, py_v_v_outputFile14__,
 py_v_v_outputFile14___, py_v_v_outputFile14____, py_v_v_outputFile14_____,
 py_v_v_outputFile14______, py_v_v_outputFile14_______,
 py_v_v_outputFile14________, py_v_v_outputFile14_________) = _data14
# One tab-separated row per sample: time column followed by the ten columns.
f_outputFile14_f2 = open('results/results14.dat', 'w')
for i in range(int(h.tstop * h.steps_per_ms) + 1):
    f_outputFile14_f2.write('%e\t' % py_v_time[i]
                            + ''.join('%e\t' % _col[i] for _col in _data14)
                            + '\n')
f_outputFile14_f2.close()
print("Saved data to: results/results14.dat")
# File to save: outputFile15
# Pull each recorded hoc Vector into a plain Python list, dividing by 1000
# (the generated comments note dim: voltage, saved in SI units).
_sfx = ('', '_', '__', '___', '____', '_____', '______', '_______',
        '________', '_________')
_data15 = [[float(x / 1000.0) for x in getattr(h, 'v_v_outputFile15' + _s).to_python()]
           for _s in _sfx]
# Keep the individual per-column names this generated script exposes.
(py_v_v_outputFile15, py_v_v_outputFile15_, py_v_v_outputFile15__,
 py_v_v_outputFile15___, py_v_v_outputFile15____, py_v_v_outputFile15_____,
 py_v_v_outputFile15______, py_v_v_outputFile15_______,
 py_v_v_outputFile15________, py_v_v_outputFile15_________) = _data15
# One tab-separated row per sample: time column followed by the ten columns.
f_outputFile15_f2 = open('results/results15.dat', 'w')
for i in range(int(h.tstop * h.steps_per_ms) + 1):
    f_outputFile15_f2.write('%e\t' % py_v_time[i]
                            + ''.join('%e\t' % _col[i] for _col in _data15)
                            + '\n')
f_outputFile15_f2.close()
print("Saved data to: results/results15.dat")
# File to save: outputFile38
# Pull each recorded hoc Vector into a plain Python list, dividing by 1000
# (the generated comments note dim: voltage, saved in SI units).
_sfx = ('', '_', '__', '___', '____', '_____', '______', '_______',
        '________', '_________')
_data38 = [[float(x / 1000.0) for x in getattr(h, 'v_v_outputFile38' + _s).to_python()]
           for _s in _sfx]
# Keep the individual per-column names this generated script exposes.
(py_v_v_outputFile38, py_v_v_outputFile38_, py_v_v_outputFile38__,
 py_v_v_outputFile38___, py_v_v_outputFile38____, py_v_v_outputFile38_____,
 py_v_v_outputFile38______, py_v_v_outputFile38_______,
 py_v_v_outputFile38________, py_v_v_outputFile38_________) = _data38
# One tab-separated row per sample: time column followed by the ten columns.
f_outputFile38_f2 = open('results/results38.dat', 'w')
for i in range(int(h.tstop * h.steps_per_ms) + 1):
    f_outputFile38_f2.write('%e\t' % py_v_time[i]
                            + ''.join('%e\t' % _col[i] for _col in _data38)
                            + '\n')
f_outputFile38_f2.close()
print("Saved data to: results/results38.dat")
# File to save: outputFile39
# Pull each recorded hoc Vector into a plain Python list, dividing by 1000
# (the generated comments note dim: voltage, saved in SI units).
_sfx = ('', '_', '__', '___', '____', '_____', '______', '_______',
        '________', '_________')
_data39 = [[float(x / 1000.0) for x in getattr(h, 'v_v_outputFile39' + _s).to_python()]
           for _s in _sfx]
# Keep the individual per-column names this generated script exposes.
(py_v_v_outputFile39, py_v_v_outputFile39_, py_v_v_outputFile39__,
 py_v_v_outputFile39___, py_v_v_outputFile39____, py_v_v_outputFile39_____,
 py_v_v_outputFile39______, py_v_v_outputFile39_______,
 py_v_v_outputFile39________, py_v_v_outputFile39_________) = _data39
# One tab-separated row per sample: time column followed by the ten columns.
f_outputFile39_f2 = open('results/results39.dat', 'w')
for i in range(int(h.tstop * h.steps_per_ms) + 1):
    f_outputFile39_f2.write('%e\t' % py_v_time[i]
                            + ''.join('%e\t' % _col[i] for _col in _data39)
                            + '\n')
f_outputFile39_f2.close()
print("Saved data to: results/results39.dat")
# File to save: outputFile36
# Pull each recorded hoc Vector into a plain Python list, dividing by 1000
# (the generated comments note dim: voltage, saved in SI units).
_sfx = ('', '_', '__', '___', '____', '_____', '______', '_______',
        '________', '_________')
_data36 = [[float(x / 1000.0) for x in getattr(h, 'v_v_outputFile36' + _s).to_python()]
           for _s in _sfx]
# Keep the individual per-column names this generated script exposes.
(py_v_v_outputFile36, py_v_v_outputFile36_, py_v_v_outputFile36__,
 py_v_v_outputFile36___, py_v_v_outputFile36____, py_v_v_outputFile36_____,
 py_v_v_outputFile36______, py_v_v_outputFile36_______,
 py_v_v_outputFile36________, py_v_v_outputFile36_________) = _data36
# One tab-separated row per sample: time column followed by the ten columns.
f_outputFile36_f2 = open('results/results36.dat', 'w')
for i in range(int(h.tstop * h.steps_per_ms) + 1):
    f_outputFile36_f2.write('%e\t' % py_v_time[i]
                            + ''.join('%e\t' % _col[i] for _col in _data36)
                            + '\n')
f_outputFile36_f2.close()
print("Saved data to: results/results36.dat")
# File to save: outputFile18
# Pull each recorded hoc Vector into a plain Python list, dividing by 1000
# (the generated comments note dim: voltage, saved in SI units).
_sfx = ('', '_', '__', '___', '____', '_____', '______', '_______',
        '________', '_________')
_data18 = [[float(x / 1000.0) for x in getattr(h, 'v_v_outputFile18' + _s).to_python()]
           for _s in _sfx]
# Keep the individual per-column names this generated script exposes.
(py_v_v_outputFile18, py_v_v_outputFile18_, py_v_v_outputFile18__,
 py_v_v_outputFile18___, py_v_v_outputFile18____, py_v_v_outputFile18_____,
 py_v_v_outputFile18______, py_v_v_outputFile18_______,
 py_v_v_outputFile18________, py_v_v_outputFile18_________) = _data18
# One tab-separated row per sample: time column followed by the ten columns.
f_outputFile18_f2 = open('results/results18.dat', 'w')
for i in range(int(h.tstop * h.steps_per_ms) + 1):
    f_outputFile18_f2.write('%e\t' % py_v_time[i]
                            + ''.join('%e\t' % _col[i] for _col in _data18)
                            + '\n')
f_outputFile18_f2.close()
print("Saved data to: results/results18.dat")
# File to save: outputFile37
# Pull each recorded hoc Vector into a plain Python list, dividing by 1000
# (the generated comments note dim: voltage, saved in SI units).
_sfx = ('', '_', '__', '___', '____', '_____', '______', '_______',
        '________', '_________')
_data37 = [[float(x / 1000.0) for x in getattr(h, 'v_v_outputFile37' + _s).to_python()]
           for _s in _sfx]
# Keep the individual per-column names this generated script exposes.
(py_v_v_outputFile37, py_v_v_outputFile37_, py_v_v_outputFile37__,
 py_v_v_outputFile37___, py_v_v_outputFile37____, py_v_v_outputFile37_____,
 py_v_v_outputFile37______, py_v_v_outputFile37_______,
 py_v_v_outputFile37________, py_v_v_outputFile37_________) = _data37
# One tab-separated row per sample: time column followed by the ten columns.
f_outputFile37_f2 = open('results/results37.dat', 'w')
for i in range(int(h.tstop * h.steps_per_ms) + 1):
    f_outputFile37_f2.write('%e\t' % py_v_time[i]
                            + ''.join('%e\t' % _col[i] for _col in _data37)
                            + '\n')
f_outputFile37_f2.close()
print("Saved data to: results/results37.dat")
# File to save: outputFile19
# Pull each recorded hoc Vector into a plain Python list, dividing by 1000
# (the generated comments note dim: voltage, saved in SI units).
_sfx = ('', '_', '__', '___', '____', '_____', '______', '_______',
        '________', '_________')
_data19 = [[float(x / 1000.0) for x in getattr(h, 'v_v_outputFile19' + _s).to_python()]
           for _s in _sfx]
# Keep the individual per-column names this generated script exposes.
(py_v_v_outputFile19, py_v_v_outputFile19_, py_v_v_outputFile19__,
 py_v_v_outputFile19___, py_v_v_outputFile19____, py_v_v_outputFile19_____,
 py_v_v_outputFile19______, py_v_v_outputFile19_______,
 py_v_v_outputFile19________, py_v_v_outputFile19_________) = _data19
# One tab-separated row per sample: time column followed by the ten columns.
f_outputFile19_f2 = open('results/results19.dat', 'w')
for i in range(int(h.tstop * h.steps_per_ms) + 1):
    f_outputFile19_f2.write('%e\t' % py_v_time[i]
                            + ''.join('%e\t' % _col[i] for _col in _data19)
                            + '\n')
f_outputFile19_f2.close()
print("Saved data to: results/results19.dat")
# File to save: outputFile0
# Pull each recorded hoc Vector into a plain Python list, dividing by 1000
# (the generated comments note dim: voltage, saved in SI units).
_sfx = ('', '_', '__', '___', '____', '_____', '______', '_______',
        '________', '_________')
_data0 = [[float(x / 1000.0) for x in getattr(h, 'v_v_outputFile0' + _s).to_python()]
          for _s in _sfx]
# Keep the individual per-column names this generated script exposes.
(py_v_v_outputFile0, py_v_v_outputFile0_, py_v_v_outputFile0__,
 py_v_v_outputFile0___, py_v_v_outputFile0____, py_v_v_outputFile0_____,
 py_v_v_outputFile0______, py_v_v_outputFile0_______,
 py_v_v_outputFile0________, py_v_v_outputFile0_________) = _data0
# One tab-separated row per sample: time column followed by the ten columns.
f_outputFile0_f2 = open('results/results0.dat', 'w')
for i in range(int(h.tstop * h.steps_per_ms) + 1):
    f_outputFile0_f2.write('%e\t' % py_v_time[i]
                           + ''.join('%e\t' % _col[i] for _col in _data0)
                           + '\n')
f_outputFile0_f2.close()
print("Saved data to: results/results0.dat")
# File to save: outputFile1
# Convert each recorded NEURON Vector (mV) to a plain Python list in volts
# (divide by 1000) for fast indexed access while writing the data file.
py_v_v_outputFile1 = [ float(x / 1000.0) for x in h.v_v_outputFile1.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile1_ = [ float(x / 1000.0) for x in h.v_v_outputFile1_.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile1__ = [ float(x / 1000.0) for x in h.v_v_outputFile1__.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile1___ = [ float(x / 1000.0) for x in h.v_v_outputFile1___.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile1____ = [ float(x / 1000.0) for x in h.v_v_outputFile1____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile1_____ = [ float(x / 1000.0) for x in h.v_v_outputFile1_____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile1______ = [ float(x / 1000.0) for x in h.v_v_outputFile1______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile1_______ = [ float(x / 1000.0) for x in h.v_v_outputFile1_______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile1________ = [ float(x / 1000.0) for x in h.v_v_outputFile1________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile1_________ = [ float(x / 1000.0) for x in h.v_v_outputFile1_________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
# Write one tab-separated row per simulation step: time column first, then the
# 10 voltage traces, each formatted '%e' and tab-terminated, row ending in '\n'.
# The 'with' block guarantees the file is closed even if a write raises.
with open('results/results1.dat', 'w') as f_outputFile1_f2:
    cols = (
        py_v_time,
        py_v_v_outputFile1,
        py_v_v_outputFile1_,
        py_v_v_outputFile1__,
        py_v_v_outputFile1___,
        py_v_v_outputFile1____,
        py_v_v_outputFile1_____,
        py_v_v_outputFile1______,
        py_v_v_outputFile1_______,
        py_v_v_outputFile1________,
        py_v_v_outputFile1_________,
    )
    for i in range(int(h.tstop * h.steps_per_ms) + 1):
        f_outputFile1_f2.write(''.join('%e\t' % c[i] for c in cols) + '\n')
print("Saved data to: results/results1.dat")
# File to save: outputFile2
# Convert each recorded NEURON Vector (mV) to a plain Python list in volts
# (divide by 1000) for fast indexed access while writing the data file.
py_v_v_outputFile2 = [ float(x / 1000.0) for x in h.v_v_outputFile2.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile2_ = [ float(x / 1000.0) for x in h.v_v_outputFile2_.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile2__ = [ float(x / 1000.0) for x in h.v_v_outputFile2__.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile2___ = [ float(x / 1000.0) for x in h.v_v_outputFile2___.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile2____ = [ float(x / 1000.0) for x in h.v_v_outputFile2____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile2_____ = [ float(x / 1000.0) for x in h.v_v_outputFile2_____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile2______ = [ float(x / 1000.0) for x in h.v_v_outputFile2______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile2_______ = [ float(x / 1000.0) for x in h.v_v_outputFile2_______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile2________ = [ float(x / 1000.0) for x in h.v_v_outputFile2________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile2_________ = [ float(x / 1000.0) for x in h.v_v_outputFile2_________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
# Write one tab-separated row per simulation step: time column first, then the
# 10 voltage traces, each formatted '%e' and tab-terminated, row ending in '\n'.
# The 'with' block guarantees the file is closed even if a write raises.
with open('results/results2.dat', 'w') as f_outputFile2_f2:
    cols = (
        py_v_time,
        py_v_v_outputFile2,
        py_v_v_outputFile2_,
        py_v_v_outputFile2__,
        py_v_v_outputFile2___,
        py_v_v_outputFile2____,
        py_v_v_outputFile2_____,
        py_v_v_outputFile2______,
        py_v_v_outputFile2_______,
        py_v_v_outputFile2________,
        py_v_v_outputFile2_________,
    )
    for i in range(int(h.tstop * h.steps_per_ms) + 1):
        f_outputFile2_f2.write(''.join('%e\t' % c[i] for c in cols) + '\n')
print("Saved data to: results/results2.dat")
# File to save: outputFile3
# Convert each recorded NEURON Vector (mV) to a plain Python list in volts
# (divide by 1000) for fast indexed access while writing the data file.
py_v_v_outputFile3 = [ float(x / 1000.0) for x in h.v_v_outputFile3.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile3_ = [ float(x / 1000.0) for x in h.v_v_outputFile3_.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile3__ = [ float(x / 1000.0) for x in h.v_v_outputFile3__.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile3___ = [ float(x / 1000.0) for x in h.v_v_outputFile3___.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile3____ = [ float(x / 1000.0) for x in h.v_v_outputFile3____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile3_____ = [ float(x / 1000.0) for x in h.v_v_outputFile3_____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile3______ = [ float(x / 1000.0) for x in h.v_v_outputFile3______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile3_______ = [ float(x / 1000.0) for x in h.v_v_outputFile3_______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile3________ = [ float(x / 1000.0) for x in h.v_v_outputFile3________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile3_________ = [ float(x / 1000.0) for x in h.v_v_outputFile3_________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
# Write one tab-separated row per simulation step: time column first, then the
# 10 voltage traces, each formatted '%e' and tab-terminated, row ending in '\n'.
# The 'with' block guarantees the file is closed even if a write raises.
with open('results/results3.dat', 'w') as f_outputFile3_f2:
    cols = (
        py_v_time,
        py_v_v_outputFile3,
        py_v_v_outputFile3_,
        py_v_v_outputFile3__,
        py_v_v_outputFile3___,
        py_v_v_outputFile3____,
        py_v_v_outputFile3_____,
        py_v_v_outputFile3______,
        py_v_v_outputFile3_______,
        py_v_v_outputFile3________,
        py_v_v_outputFile3_________,
    )
    for i in range(int(h.tstop * h.steps_per_ms) + 1):
        f_outputFile3_f2.write(''.join('%e\t' % c[i] for c in cols) + '\n')
print("Saved data to: results/results3.dat")
# File to save: outputFile4
# Convert each recorded NEURON Vector (mV) to a plain Python list in volts
# (divide by 1000) for fast indexed access while writing the data file.
py_v_v_outputFile4 = [ float(x / 1000.0) for x in h.v_v_outputFile4.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile4_ = [ float(x / 1000.0) for x in h.v_v_outputFile4_.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile4__ = [ float(x / 1000.0) for x in h.v_v_outputFile4__.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile4___ = [ float(x / 1000.0) for x in h.v_v_outputFile4___.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile4____ = [ float(x / 1000.0) for x in h.v_v_outputFile4____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile4_____ = [ float(x / 1000.0) for x in h.v_v_outputFile4_____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile4______ = [ float(x / 1000.0) for x in h.v_v_outputFile4______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile4_______ = [ float(x / 1000.0) for x in h.v_v_outputFile4_______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile4________ = [ float(x / 1000.0) for x in h.v_v_outputFile4________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile4_________ = [ float(x / 1000.0) for x in h.v_v_outputFile4_________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
# Write one tab-separated row per simulation step: time column first, then the
# 10 voltage traces, each formatted '%e' and tab-terminated, row ending in '\n'.
# The 'with' block guarantees the file is closed even if a write raises.
with open('results/results4.dat', 'w') as f_outputFile4_f2:
    cols = (
        py_v_time,
        py_v_v_outputFile4,
        py_v_v_outputFile4_,
        py_v_v_outputFile4__,
        py_v_v_outputFile4___,
        py_v_v_outputFile4____,
        py_v_v_outputFile4_____,
        py_v_v_outputFile4______,
        py_v_v_outputFile4_______,
        py_v_v_outputFile4________,
        py_v_v_outputFile4_________,
    )
    for i in range(int(h.tstop * h.steps_per_ms) + 1):
        f_outputFile4_f2.write(''.join('%e\t' % c[i] for c in cols) + '\n')
print("Saved data to: results/results4.dat")
# File to save: outputFile5
# Convert each recorded NEURON Vector (mV) to a plain Python list in volts
# (divide by 1000) for fast indexed access while writing the data file.
py_v_v_outputFile5 = [ float(x / 1000.0) for x in h.v_v_outputFile5.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile5_ = [ float(x / 1000.0) for x in h.v_v_outputFile5_.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile5__ = [ float(x / 1000.0) for x in h.v_v_outputFile5__.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile5___ = [ float(x / 1000.0) for x in h.v_v_outputFile5___.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile5____ = [ float(x / 1000.0) for x in h.v_v_outputFile5____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile5_____ = [ float(x / 1000.0) for x in h.v_v_outputFile5_____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile5______ = [ float(x / 1000.0) for x in h.v_v_outputFile5______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile5_______ = [ float(x / 1000.0) for x in h.v_v_outputFile5_______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile5________ = [ float(x / 1000.0) for x in h.v_v_outputFile5________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile5_________ = [ float(x / 1000.0) for x in h.v_v_outputFile5_________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
# Write one tab-separated row per simulation step: time column first, then the
# 10 voltage traces, each formatted '%e' and tab-terminated, row ending in '\n'.
# The 'with' block guarantees the file is closed even if a write raises.
with open('results/results5.dat', 'w') as f_outputFile5_f2:
    cols = (
        py_v_time,
        py_v_v_outputFile5,
        py_v_v_outputFile5_,
        py_v_v_outputFile5__,
        py_v_v_outputFile5___,
        py_v_v_outputFile5____,
        py_v_v_outputFile5_____,
        py_v_v_outputFile5______,
        py_v_v_outputFile5_______,
        py_v_v_outputFile5________,
        py_v_v_outputFile5_________,
    )
    for i in range(int(h.tstop * h.steps_per_ms) + 1):
        f_outputFile5_f2.write(''.join('%e\t' % c[i] for c in cols) + '\n')
print("Saved data to: results/results5.dat")
# File to save: outputFile20
# Convert each recorded NEURON Vector (mV) to a plain Python list in volts
# (divide by 1000) for fast indexed access while writing the data file.
py_v_v_outputFile20 = [ float(x / 1000.0) for x in h.v_v_outputFile20.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile20_ = [ float(x / 1000.0) for x in h.v_v_outputFile20_.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile20__ = [ float(x / 1000.0) for x in h.v_v_outputFile20__.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile20___ = [ float(x / 1000.0) for x in h.v_v_outputFile20___.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile20____ = [ float(x / 1000.0) for x in h.v_v_outputFile20____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile20_____ = [ float(x / 1000.0) for x in h.v_v_outputFile20_____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile20______ = [ float(x / 1000.0) for x in h.v_v_outputFile20______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile20_______ = [ float(x / 1000.0) for x in h.v_v_outputFile20_______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile20________ = [ float(x / 1000.0) for x in h.v_v_outputFile20________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile20_________ = [ float(x / 1000.0) for x in h.v_v_outputFile20_________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
# Write one tab-separated row per simulation step: time column first, then the
# 10 voltage traces, each formatted '%e' and tab-terminated, row ending in '\n'.
# The 'with' block guarantees the file is closed even if a write raises.
with open('results/results20.dat', 'w') as f_outputFile20_f2:
    cols = (
        py_v_time,
        py_v_v_outputFile20,
        py_v_v_outputFile20_,
        py_v_v_outputFile20__,
        py_v_v_outputFile20___,
        py_v_v_outputFile20____,
        py_v_v_outputFile20_____,
        py_v_v_outputFile20______,
        py_v_v_outputFile20_______,
        py_v_v_outputFile20________,
        py_v_v_outputFile20_________,
    )
    for i in range(int(h.tstop * h.steps_per_ms) + 1):
        f_outputFile20_f2.write(''.join('%e\t' % c[i] for c in cols) + '\n')
print("Saved data to: results/results20.dat")
# File to save: outputFile45
# Convert each recorded NEURON Vector (mV) to a plain Python list in volts
# (divide by 1000) for fast indexed access while writing the data file.
# NOTE: this output file records only 6 voltage traces (vs. 10 in most others).
py_v_v_outputFile45 = [ float(x / 1000.0) for x in h.v_v_outputFile45.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile45_ = [ float(x / 1000.0) for x in h.v_v_outputFile45_.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile45__ = [ float(x / 1000.0) for x in h.v_v_outputFile45__.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile45___ = [ float(x / 1000.0) for x in h.v_v_outputFile45___.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile45____ = [ float(x / 1000.0) for x in h.v_v_outputFile45____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile45_____ = [ float(x / 1000.0) for x in h.v_v_outputFile45_____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
# Write one tab-separated row per simulation step: time column first, then the
# 6 voltage traces, each formatted '%e' and tab-terminated, row ending in '\n'.
# The 'with' block guarantees the file is closed even if a write raises.
with open('results/results45.dat', 'w') as f_outputFile45_f2:
    cols = (
        py_v_time,
        py_v_v_outputFile45,
        py_v_v_outputFile45_,
        py_v_v_outputFile45__,
        py_v_v_outputFile45___,
        py_v_v_outputFile45____,
        py_v_v_outputFile45_____,
    )
    for i in range(int(h.tstop * h.steps_per_ms) + 1):
        f_outputFile45_f2.write(''.join('%e\t' % c[i] for c in cols) + '\n')
print("Saved data to: results/results45.dat")
# File to save: outputFile6
# Convert each recorded NEURON Vector (mV) to a plain Python list in volts
# (divide by 1000) for fast indexed access while writing the data file.
py_v_v_outputFile6 = [ float(x / 1000.0) for x in h.v_v_outputFile6.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile6_ = [ float(x / 1000.0) for x in h.v_v_outputFile6_.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile6__ = [ float(x / 1000.0) for x in h.v_v_outputFile6__.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile6___ = [ float(x / 1000.0) for x in h.v_v_outputFile6___.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile6____ = [ float(x / 1000.0) for x in h.v_v_outputFile6____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile6_____ = [ float(x / 1000.0) for x in h.v_v_outputFile6_____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile6______ = [ float(x / 1000.0) for x in h.v_v_outputFile6______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile6_______ = [ float(x / 1000.0) for x in h.v_v_outputFile6_______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile6________ = [ float(x / 1000.0) for x in h.v_v_outputFile6________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile6_________ = [ float(x / 1000.0) for x in h.v_v_outputFile6_________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
# Write one tab-separated row per simulation step: time column first, then the
# 10 voltage traces, each formatted '%e' and tab-terminated, row ending in '\n'.
# The 'with' block guarantees the file is closed even if a write raises.
with open('results/results6.dat', 'w') as f_outputFile6_f2:
    cols = (
        py_v_time,
        py_v_v_outputFile6,
        py_v_v_outputFile6_,
        py_v_v_outputFile6__,
        py_v_v_outputFile6___,
        py_v_v_outputFile6____,
        py_v_v_outputFile6_____,
        py_v_v_outputFile6______,
        py_v_v_outputFile6_______,
        py_v_v_outputFile6________,
        py_v_v_outputFile6_________,
    )
    for i in range(int(h.tstop * h.steps_per_ms) + 1):
        f_outputFile6_f2.write(''.join('%e\t' % c[i] for c in cols) + '\n')
print("Saved data to: results/results6.dat")
# File to save: outputFile44
# Convert each recorded NEURON Vector (mV) to a plain Python list in volts
# (divide by 1000) for fast indexed access while writing the data file.
py_v_v_outputFile44 = [ float(x / 1000.0) for x in h.v_v_outputFile44.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile44_ = [ float(x / 1000.0) for x in h.v_v_outputFile44_.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile44__ = [ float(x / 1000.0) for x in h.v_v_outputFile44__.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile44___ = [ float(x / 1000.0) for x in h.v_v_outputFile44___.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile44____ = [ float(x / 1000.0) for x in h.v_v_outputFile44____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile44_____ = [ float(x / 1000.0) for x in h.v_v_outputFile44_____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile44______ = [ float(x / 1000.0) for x in h.v_v_outputFile44______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile44_______ = [ float(x / 1000.0) for x in h.v_v_outputFile44_______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile44________ = [ float(x / 1000.0) for x in h.v_v_outputFile44________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile44_________ = [ float(x / 1000.0) for x in h.v_v_outputFile44_________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
# Write one tab-separated row per simulation step: time column first, then the
# 10 voltage traces, each formatted '%e' and tab-terminated, row ending in '\n'.
# The 'with' block guarantees the file is closed even if a write raises.
with open('results/results44.dat', 'w') as f_outputFile44_f2:
    cols = (
        py_v_time,
        py_v_v_outputFile44,
        py_v_v_outputFile44_,
        py_v_v_outputFile44__,
        py_v_v_outputFile44___,
        py_v_v_outputFile44____,
        py_v_v_outputFile44_____,
        py_v_v_outputFile44______,
        py_v_v_outputFile44_______,
        py_v_v_outputFile44________,
        py_v_v_outputFile44_________,
    )
    for i in range(int(h.tstop * h.steps_per_ms) + 1):
        f_outputFile44_f2.write(''.join('%e\t' % c[i] for c in cols) + '\n')
print("Saved data to: results/results44.dat")
# File to save: outputFile7
# Convert each recorded NEURON Vector (mV) to a plain Python list in volts
# (divide by 1000) for fast indexed access while writing the data file.
py_v_v_outputFile7 = [ float(x / 1000.0) for x in h.v_v_outputFile7.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile7_ = [ float(x / 1000.0) for x in h.v_v_outputFile7_.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile7__ = [ float(x / 1000.0) for x in h.v_v_outputFile7__.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile7___ = [ float(x / 1000.0) for x in h.v_v_outputFile7___.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile7____ = [ float(x / 1000.0) for x in h.v_v_outputFile7____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile7_____ = [ float(x / 1000.0) for x in h.v_v_outputFile7_____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile7______ = [ float(x / 1000.0) for x in h.v_v_outputFile7______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile7_______ = [ float(x / 1000.0) for x in h.v_v_outputFile7_______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile7________ = [ float(x / 1000.0) for x in h.v_v_outputFile7________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile7_________ = [ float(x / 1000.0) for x in h.v_v_outputFile7_________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
# Write one tab-separated row per simulation step: time column first, then the
# 10 voltage traces, each formatted '%e' and tab-terminated, row ending in '\n'.
# The 'with' block guarantees the file is closed even if a write raises.
with open('results/results7.dat', 'w') as f_outputFile7_f2:
    cols = (
        py_v_time,
        py_v_v_outputFile7,
        py_v_v_outputFile7_,
        py_v_v_outputFile7__,
        py_v_v_outputFile7___,
        py_v_v_outputFile7____,
        py_v_v_outputFile7_____,
        py_v_v_outputFile7______,
        py_v_v_outputFile7_______,
        py_v_v_outputFile7________,
        py_v_v_outputFile7_________,
    )
    for i in range(int(h.tstop * h.steps_per_ms) + 1):
        f_outputFile7_f2.write(''.join('%e\t' % c[i] for c in cols) + '\n')
print("Saved data to: results/results7.dat")
# File to save: outputFile43
# Convert each recorded NEURON Vector (mV) to a plain Python list in volts
# (divide by 1000) for fast indexed access while writing the data file.
py_v_v_outputFile43 = [ float(x / 1000.0) for x in h.v_v_outputFile43.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile43_ = [ float(x / 1000.0) for x in h.v_v_outputFile43_.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile43__ = [ float(x / 1000.0) for x in h.v_v_outputFile43__.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile43___ = [ float(x / 1000.0) for x in h.v_v_outputFile43___.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile43____ = [ float(x / 1000.0) for x in h.v_v_outputFile43____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile43_____ = [ float(x / 1000.0) for x in h.v_v_outputFile43_____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile43______ = [ float(x / 1000.0) for x in h.v_v_outputFile43______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile43_______ = [ float(x / 1000.0) for x in h.v_v_outputFile43_______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile43________ = [ float(x / 1000.0) for x in h.v_v_outputFile43________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile43_________ = [ float(x / 1000.0) for x in h.v_v_outputFile43_________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
# Write one tab-separated row per simulation step: time column first, then the
# 10 voltage traces, each formatted '%e' and tab-terminated, row ending in '\n'.
# The 'with' block guarantees the file is closed even if a write raises.
with open('results/results43.dat', 'w') as f_outputFile43_f2:
    cols = (
        py_v_time,
        py_v_v_outputFile43,
        py_v_v_outputFile43_,
        py_v_v_outputFile43__,
        py_v_v_outputFile43___,
        py_v_v_outputFile43____,
        py_v_v_outputFile43_____,
        py_v_v_outputFile43______,
        py_v_v_outputFile43_______,
        py_v_v_outputFile43________,
        py_v_v_outputFile43_________,
    )
    for i in range(int(h.tstop * h.steps_per_ms) + 1):
        f_outputFile43_f2.write(''.join('%e\t' % c[i] for c in cols) + '\n')
print("Saved data to: results/results43.dat")
# File to save: outputFile8
# Convert each recorded NEURON Vector (mV) to a plain Python list in volts
# (divide by 1000) for fast indexed access while writing the data file.
py_v_v_outputFile8 = [ float(x / 1000.0) for x in h.v_v_outputFile8.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile8_ = [ float(x / 1000.0) for x in h.v_v_outputFile8_.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile8__ = [ float(x / 1000.0) for x in h.v_v_outputFile8__.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile8___ = [ float(x / 1000.0) for x in h.v_v_outputFile8___.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile8____ = [ float(x / 1000.0) for x in h.v_v_outputFile8____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile8_____ = [ float(x / 1000.0) for x in h.v_v_outputFile8_____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile8______ = [ float(x / 1000.0) for x in h.v_v_outputFile8______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile8_______ = [ float(x / 1000.0) for x in h.v_v_outputFile8_______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile8________ = [ float(x / 1000.0) for x in h.v_v_outputFile8________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile8_________ = [ float(x / 1000.0) for x in h.v_v_outputFile8_________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
# Write one tab-separated row per simulation step: time column first, then the
# 10 voltage traces, each formatted '%e' and tab-terminated, row ending in '\n'.
# The 'with' block guarantees the file is closed even if a write raises.
with open('results/results8.dat', 'w') as f_outputFile8_f2:
    cols = (
        py_v_time,
        py_v_v_outputFile8,
        py_v_v_outputFile8_,
        py_v_v_outputFile8__,
        py_v_v_outputFile8___,
        py_v_v_outputFile8____,
        py_v_v_outputFile8_____,
        py_v_v_outputFile8______,
        py_v_v_outputFile8_______,
        py_v_v_outputFile8________,
        py_v_v_outputFile8_________,
    )
    for i in range(int(h.tstop * h.steps_per_ms) + 1):
        f_outputFile8_f2.write(''.join('%e\t' % c[i] for c in cols) + '\n')
print("Saved data to: results/results8.dat")
# File to save: outputFile42
# Convert each recorded NEURON Vector (mV) to a plain Python list in volts
# (divide by 1000) for fast indexed access while writing the data file.
py_v_v_outputFile42 = [ float(x / 1000.0) for x in h.v_v_outputFile42.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile42_ = [ float(x / 1000.0) for x in h.v_v_outputFile42_.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile42__ = [ float(x / 1000.0) for x in h.v_v_outputFile42__.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile42___ = [ float(x / 1000.0) for x in h.v_v_outputFile42___.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile42____ = [ float(x / 1000.0) for x in h.v_v_outputFile42____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile42_____ = [ float(x / 1000.0) for x in h.v_v_outputFile42_____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile42______ = [ float(x / 1000.0) for x in h.v_v_outputFile42______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile42_______ = [ float(x / 1000.0) for x in h.v_v_outputFile42_______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile42________ = [ float(x / 1000.0) for x in h.v_v_outputFile42________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile42_________ = [ float(x / 1000.0) for x in h.v_v_outputFile42_________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
# Write one tab-separated row per simulation step: time column first, then the
# 10 voltage traces, each formatted '%e' and tab-terminated, row ending in '\n'.
# The 'with' block guarantees the file is closed even if a write raises.
with open('results/results42.dat', 'w') as f_outputFile42_f2:
    cols = (
        py_v_time,
        py_v_v_outputFile42,
        py_v_v_outputFile42_,
        py_v_v_outputFile42__,
        py_v_v_outputFile42___,
        py_v_v_outputFile42____,
        py_v_v_outputFile42_____,
        py_v_v_outputFile42______,
        py_v_v_outputFile42_______,
        py_v_v_outputFile42________,
        py_v_v_outputFile42_________,
    )
    for i in range(int(h.tstop * h.steps_per_ms) + 1):
        f_outputFile42_f2.write(''.join('%e\t' % c[i] for c in cols) + '\n')
print("Saved data to: results/results42.dat")
# File to save: outputFile24
# Convert each recorded NEURON Vector (mV) to a plain Python list in volts
# (divide by 1000) for fast indexed access while writing the data file.
py_v_v_outputFile24 = [ float(x / 1000.0) for x in h.v_v_outputFile24.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile24_ = [ float(x / 1000.0) for x in h.v_v_outputFile24_.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile24__ = [ float(x / 1000.0) for x in h.v_v_outputFile24__.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile24___ = [ float(x / 1000.0) for x in h.v_v_outputFile24___.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile24____ = [ float(x / 1000.0) for x in h.v_v_outputFile24____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile24_____ = [ float(x / 1000.0) for x in h.v_v_outputFile24_____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile24______ = [ float(x / 1000.0) for x in h.v_v_outputFile24______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile24_______ = [ float(x / 1000.0) for x in h.v_v_outputFile24_______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile24________ = [ float(x / 1000.0) for x in h.v_v_outputFile24________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile24_________ = [ float(x / 1000.0) for x in h.v_v_outputFile24_________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
# Write one tab-separated row per simulation step: time column first, then the
# 10 voltage traces, each formatted '%e' and tab-terminated, row ending in '\n'.
# The 'with' block guarantees the file is closed even if a write raises.
with open('results/results24.dat', 'w') as f_outputFile24_f2:
    cols = (
        py_v_time,
        py_v_v_outputFile24,
        py_v_v_outputFile24_,
        py_v_v_outputFile24__,
        py_v_v_outputFile24___,
        py_v_v_outputFile24____,
        py_v_v_outputFile24_____,
        py_v_v_outputFile24______,
        py_v_v_outputFile24_______,
        py_v_v_outputFile24________,
        py_v_v_outputFile24_________,
    )
    for i in range(int(h.tstop * h.steps_per_ms) + 1):
        f_outputFile24_f2.write(''.join('%e\t' % c[i] for c in cols) + '\n')
print("Saved data to: results/results24.dat")
# File to save: outputFile9
# Convert each recorded NEURON Vector (mV) to a plain Python list in volts
# (divide by 1000) for fast indexed access while writing the data file.
py_v_v_outputFile9 = [ float(x / 1000.0) for x in h.v_v_outputFile9.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile9_ = [ float(x / 1000.0) for x in h.v_v_outputFile9_.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile9__ = [ float(x / 1000.0) for x in h.v_v_outputFile9__.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile9___ = [ float(x / 1000.0) for x in h.v_v_outputFile9___.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile9____ = [ float(x / 1000.0) for x in h.v_v_outputFile9____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile9_____ = [ float(x / 1000.0) for x in h.v_v_outputFile9_____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile9______ = [ float(x / 1000.0) for x in h.v_v_outputFile9______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile9_______ = [ float(x / 1000.0) for x in h.v_v_outputFile9_______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile9________ = [ float(x / 1000.0) for x in h.v_v_outputFile9________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile9_________ = [ float(x / 1000.0) for x in h.v_v_outputFile9_________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
# Write one tab-separated row per simulation step: time column first, then the
# 10 voltage traces, each formatted '%e' and tab-terminated, row ending in '\n'.
# The 'with' block guarantees the file is closed even if a write raises.
with open('results/results9.dat', 'w') as f_outputFile9_f2:
    cols = (
        py_v_time,
        py_v_v_outputFile9,
        py_v_v_outputFile9_,
        py_v_v_outputFile9__,
        py_v_v_outputFile9___,
        py_v_v_outputFile9____,
        py_v_v_outputFile9_____,
        py_v_v_outputFile9______,
        py_v_v_outputFile9_______,
        py_v_v_outputFile9________,
        py_v_v_outputFile9_________,
    )
    for i in range(int(h.tstop * h.steps_per_ms) + 1):
        f_outputFile9_f2.write(''.join('%e\t' % c[i] for c in cols) + '\n')
print("Saved data to: results/results9.dat")
# File to save: outputFile41
# Convert each recorded NEURON Vector from mV to volts as a plain Python
# list (native lists are much faster to index than hoc Vectors).
py_v_v_outputFile41 = [float(x / 1000.0) for x in h.v_v_outputFile41.to_python()]
py_v_v_outputFile41_ = [float(x / 1000.0) for x in h.v_v_outputFile41_.to_python()]
py_v_v_outputFile41__ = [float(x / 1000.0) for x in h.v_v_outputFile41__.to_python()]
py_v_v_outputFile41___ = [float(x / 1000.0) for x in h.v_v_outputFile41___.to_python()]
py_v_v_outputFile41____ = [float(x / 1000.0) for x in h.v_v_outputFile41____.to_python()]
py_v_v_outputFile41_____ = [float(x / 1000.0) for x in h.v_v_outputFile41_____.to_python()]
py_v_v_outputFile41______ = [float(x / 1000.0) for x in h.v_v_outputFile41______.to_python()]
py_v_v_outputFile41_______ = [float(x / 1000.0) for x in h.v_v_outputFile41_______.to_python()]
py_v_v_outputFile41________ = [float(x / 1000.0) for x in h.v_v_outputFile41________.to_python()]
py_v_v_outputFile41_________ = [float(x / 1000.0) for x in h.v_v_outputFile41_________.to_python()]
# Voltage columns in the order they are written after the time column.
_cols_outputFile41 = [
    py_v_v_outputFile41, py_v_v_outputFile41_, py_v_v_outputFile41__,
    py_v_v_outputFile41___, py_v_v_outputFile41____, py_v_v_outputFile41_____,
    py_v_v_outputFile41______, py_v_v_outputFile41_______,
    py_v_v_outputFile41________, py_v_v_outputFile41_________,
]
# Fix: use a context manager so the file is closed even if writing raises,
# and assemble each tab-separated row with join() instead of long '+' chains.
with open('results/results41.dat', 'w') as f_outputFile41_f2:
    for i in range(int(h.tstop * h.steps_per_ms) + 1):
        f_outputFile41_f2.write('%e\t' % py_v_time[i]
                                + ''.join('%e\t' % col[i] for col in _cols_outputFile41)
                                + '\n')
print("Saved data to: results/results41.dat")
# File to save: outputFile23
# Convert each recorded NEURON Vector from mV to volts as a plain Python
# list (native lists are much faster to index than hoc Vectors).
py_v_v_outputFile23 = [float(x / 1000.0) for x in h.v_v_outputFile23.to_python()]
py_v_v_outputFile23_ = [float(x / 1000.0) for x in h.v_v_outputFile23_.to_python()]
py_v_v_outputFile23__ = [float(x / 1000.0) for x in h.v_v_outputFile23__.to_python()]
py_v_v_outputFile23___ = [float(x / 1000.0) for x in h.v_v_outputFile23___.to_python()]
py_v_v_outputFile23____ = [float(x / 1000.0) for x in h.v_v_outputFile23____.to_python()]
py_v_v_outputFile23_____ = [float(x / 1000.0) for x in h.v_v_outputFile23_____.to_python()]
py_v_v_outputFile23______ = [float(x / 1000.0) for x in h.v_v_outputFile23______.to_python()]
py_v_v_outputFile23_______ = [float(x / 1000.0) for x in h.v_v_outputFile23_______.to_python()]
py_v_v_outputFile23________ = [float(x / 1000.0) for x in h.v_v_outputFile23________.to_python()]
py_v_v_outputFile23_________ = [float(x / 1000.0) for x in h.v_v_outputFile23_________.to_python()]
# Voltage columns in the order they are written after the time column.
_cols_outputFile23 = [
    py_v_v_outputFile23, py_v_v_outputFile23_, py_v_v_outputFile23__,
    py_v_v_outputFile23___, py_v_v_outputFile23____, py_v_v_outputFile23_____,
    py_v_v_outputFile23______, py_v_v_outputFile23_______,
    py_v_v_outputFile23________, py_v_v_outputFile23_________,
]
# Fix: use a context manager so the file is closed even if writing raises,
# and assemble each tab-separated row with join() instead of long '+' chains.
with open('results/results23.dat', 'w') as f_outputFile23_f2:
    for i in range(int(h.tstop * h.steps_per_ms) + 1):
        f_outputFile23_f2.write('%e\t' % py_v_time[i]
                                + ''.join('%e\t' % col[i] for col in _cols_outputFile23)
                                + '\n')
print("Saved data to: results/results23.dat")
# File to save: outputFile40
# Convert each recorded NEURON Vector from mV to volts as a plain Python
# list (native lists are much faster to index than hoc Vectors).
py_v_v_outputFile40 = [float(x / 1000.0) for x in h.v_v_outputFile40.to_python()]
py_v_v_outputFile40_ = [float(x / 1000.0) for x in h.v_v_outputFile40_.to_python()]
py_v_v_outputFile40__ = [float(x / 1000.0) for x in h.v_v_outputFile40__.to_python()]
py_v_v_outputFile40___ = [float(x / 1000.0) for x in h.v_v_outputFile40___.to_python()]
py_v_v_outputFile40____ = [float(x / 1000.0) for x in h.v_v_outputFile40____.to_python()]
py_v_v_outputFile40_____ = [float(x / 1000.0) for x in h.v_v_outputFile40_____.to_python()]
py_v_v_outputFile40______ = [float(x / 1000.0) for x in h.v_v_outputFile40______.to_python()]
py_v_v_outputFile40_______ = [float(x / 1000.0) for x in h.v_v_outputFile40_______.to_python()]
py_v_v_outputFile40________ = [float(x / 1000.0) for x in h.v_v_outputFile40________.to_python()]
py_v_v_outputFile40_________ = [float(x / 1000.0) for x in h.v_v_outputFile40_________.to_python()]
# Voltage columns in the order they are written after the time column.
_cols_outputFile40 = [
    py_v_v_outputFile40, py_v_v_outputFile40_, py_v_v_outputFile40__,
    py_v_v_outputFile40___, py_v_v_outputFile40____, py_v_v_outputFile40_____,
    py_v_v_outputFile40______, py_v_v_outputFile40_______,
    py_v_v_outputFile40________, py_v_v_outputFile40_________,
]
# Fix: use a context manager so the file is closed even if writing raises,
# and assemble each tab-separated row with join() instead of long '+' chains.
with open('results/results40.dat', 'w') as f_outputFile40_f2:
    for i in range(int(h.tstop * h.steps_per_ms) + 1):
        f_outputFile40_f2.write('%e\t' % py_v_time[i]
                                + ''.join('%e\t' % col[i] for col in _cols_outputFile40)
                                + '\n')
print("Saved data to: results/results40.dat")
# File to save: outputFile22
# Convert each recorded NEURON Vector from mV to volts as a plain Python
# list (native lists are much faster to index than hoc Vectors).
py_v_v_outputFile22 = [float(x / 1000.0) for x in h.v_v_outputFile22.to_python()]
py_v_v_outputFile22_ = [float(x / 1000.0) for x in h.v_v_outputFile22_.to_python()]
py_v_v_outputFile22__ = [float(x / 1000.0) for x in h.v_v_outputFile22__.to_python()]
py_v_v_outputFile22___ = [float(x / 1000.0) for x in h.v_v_outputFile22___.to_python()]
py_v_v_outputFile22____ = [float(x / 1000.0) for x in h.v_v_outputFile22____.to_python()]
py_v_v_outputFile22_____ = [float(x / 1000.0) for x in h.v_v_outputFile22_____.to_python()]
py_v_v_outputFile22______ = [float(x / 1000.0) for x in h.v_v_outputFile22______.to_python()]
py_v_v_outputFile22_______ = [float(x / 1000.0) for x in h.v_v_outputFile22_______.to_python()]
py_v_v_outputFile22________ = [float(x / 1000.0) for x in h.v_v_outputFile22________.to_python()]
py_v_v_outputFile22_________ = [float(x / 1000.0) for x in h.v_v_outputFile22_________.to_python()]
# Voltage columns in the order they are written after the time column.
_cols_outputFile22 = [
    py_v_v_outputFile22, py_v_v_outputFile22_, py_v_v_outputFile22__,
    py_v_v_outputFile22___, py_v_v_outputFile22____, py_v_v_outputFile22_____,
    py_v_v_outputFile22______, py_v_v_outputFile22_______,
    py_v_v_outputFile22________, py_v_v_outputFile22_________,
]
# Fix: use a context manager so the file is closed even if writing raises,
# and assemble each tab-separated row with join() instead of long '+' chains.
with open('results/results22.dat', 'w') as f_outputFile22_f2:
    for i in range(int(h.tstop * h.steps_per_ms) + 1):
        f_outputFile22_f2.write('%e\t' % py_v_time[i]
                                + ''.join('%e\t' % col[i] for col in _cols_outputFile22)
                                + '\n')
print("Saved data to: results/results22.dat")
# File to save: outputFile21
# Convert each recorded NEURON Vector from mV to volts as a plain Python
# list (native lists are much faster to index than hoc Vectors).
py_v_v_outputFile21 = [float(x / 1000.0) for x in h.v_v_outputFile21.to_python()]
py_v_v_outputFile21_ = [float(x / 1000.0) for x in h.v_v_outputFile21_.to_python()]
py_v_v_outputFile21__ = [float(x / 1000.0) for x in h.v_v_outputFile21__.to_python()]
py_v_v_outputFile21___ = [float(x / 1000.0) for x in h.v_v_outputFile21___.to_python()]
py_v_v_outputFile21____ = [float(x / 1000.0) for x in h.v_v_outputFile21____.to_python()]
py_v_v_outputFile21_____ = [float(x / 1000.0) for x in h.v_v_outputFile21_____.to_python()]
py_v_v_outputFile21______ = [float(x / 1000.0) for x in h.v_v_outputFile21______.to_python()]
py_v_v_outputFile21_______ = [float(x / 1000.0) for x in h.v_v_outputFile21_______.to_python()]
py_v_v_outputFile21________ = [float(x / 1000.0) for x in h.v_v_outputFile21________.to_python()]
py_v_v_outputFile21_________ = [float(x / 1000.0) for x in h.v_v_outputFile21_________.to_python()]
# Voltage columns in the order they are written after the time column.
_cols_outputFile21 = [
    py_v_v_outputFile21, py_v_v_outputFile21_, py_v_v_outputFile21__,
    py_v_v_outputFile21___, py_v_v_outputFile21____, py_v_v_outputFile21_____,
    py_v_v_outputFile21______, py_v_v_outputFile21_______,
    py_v_v_outputFile21________, py_v_v_outputFile21_________,
]
# Fix: use a context manager so the file is closed even if writing raises,
# and assemble each tab-separated row with join() instead of long '+' chains.
with open('results/results21.dat', 'w') as f_outputFile21_f2:
    for i in range(int(h.tstop * h.steps_per_ms) + 1):
        f_outputFile21_f2.write('%e\t' % py_v_time[i]
                                + ''.join('%e\t' % col[i] for col in _cols_outputFile21)
                                + '\n')
print("Saved data to: results/results21.dat")
# File to save: outputFile25
# Convert each recorded NEURON Vector from mV to volts as a plain Python
# list (native lists are much faster to index than hoc Vectors).
py_v_v_outputFile25 = [float(x / 1000.0) for x in h.v_v_outputFile25.to_python()]
py_v_v_outputFile25_ = [float(x / 1000.0) for x in h.v_v_outputFile25_.to_python()]
py_v_v_outputFile25__ = [float(x / 1000.0) for x in h.v_v_outputFile25__.to_python()]
py_v_v_outputFile25___ = [float(x / 1000.0) for x in h.v_v_outputFile25___.to_python()]
py_v_v_outputFile25____ = [float(x / 1000.0) for x in h.v_v_outputFile25____.to_python()]
py_v_v_outputFile25_____ = [float(x / 1000.0) for x in h.v_v_outputFile25_____.to_python()]
py_v_v_outputFile25______ = [float(x / 1000.0) for x in h.v_v_outputFile25______.to_python()]
py_v_v_outputFile25_______ = [float(x / 1000.0) for x in h.v_v_outputFile25_______.to_python()]
py_v_v_outputFile25________ = [float(x / 1000.0) for x in h.v_v_outputFile25________.to_python()]
py_v_v_outputFile25_________ = [float(x / 1000.0) for x in h.v_v_outputFile25_________.to_python()]
# Voltage columns in the order they are written after the time column.
_cols_outputFile25 = [
    py_v_v_outputFile25, py_v_v_outputFile25_, py_v_v_outputFile25__,
    py_v_v_outputFile25___, py_v_v_outputFile25____, py_v_v_outputFile25_____,
    py_v_v_outputFile25______, py_v_v_outputFile25_______,
    py_v_v_outputFile25________, py_v_v_outputFile25_________,
]
# Fix: use a context manager so the file is closed even if writing raises,
# and assemble each tab-separated row with join() instead of long '+' chains.
with open('results/results25.dat', 'w') as f_outputFile25_f2:
    for i in range(int(h.tstop * h.steps_per_ms) + 1):
        f_outputFile25_f2.write('%e\t' % py_v_time[i]
                                + ''.join('%e\t' % col[i] for col in _cols_outputFile25)
                                + '\n')
print("Saved data to: results/results25.dat")
# File to save: outputFile12
# Convert each recorded NEURON Vector from mV to volts as a plain Python
# list (native lists are much faster to index than hoc Vectors).
py_v_v_outputFile12 = [float(x / 1000.0) for x in h.v_v_outputFile12.to_python()]
py_v_v_outputFile12_ = [float(x / 1000.0) for x in h.v_v_outputFile12_.to_python()]
py_v_v_outputFile12__ = [float(x / 1000.0) for x in h.v_v_outputFile12__.to_python()]
py_v_v_outputFile12___ = [float(x / 1000.0) for x in h.v_v_outputFile12___.to_python()]
py_v_v_outputFile12____ = [float(x / 1000.0) for x in h.v_v_outputFile12____.to_python()]
py_v_v_outputFile12_____ = [float(x / 1000.0) for x in h.v_v_outputFile12_____.to_python()]
py_v_v_outputFile12______ = [float(x / 1000.0) for x in h.v_v_outputFile12______.to_python()]
py_v_v_outputFile12_______ = [float(x / 1000.0) for x in h.v_v_outputFile12_______.to_python()]
py_v_v_outputFile12________ = [float(x / 1000.0) for x in h.v_v_outputFile12________.to_python()]
py_v_v_outputFile12_________ = [float(x / 1000.0) for x in h.v_v_outputFile12_________.to_python()]
# Voltage columns in the order they are written after the time column.
_cols_outputFile12 = [
    py_v_v_outputFile12, py_v_v_outputFile12_, py_v_v_outputFile12__,
    py_v_v_outputFile12___, py_v_v_outputFile12____, py_v_v_outputFile12_____,
    py_v_v_outputFile12______, py_v_v_outputFile12_______,
    py_v_v_outputFile12________, py_v_v_outputFile12_________,
]
# Fix: use a context manager so the file is closed even if writing raises,
# and assemble each tab-separated row with join() instead of long '+' chains.
with open('results/results12.dat', 'w') as f_outputFile12_f2:
    for i in range(int(h.tstop * h.steps_per_ms) + 1):
        f_outputFile12_f2.write('%e\t' % py_v_time[i]
                                + ''.join('%e\t' % col[i] for col in _cols_outputFile12)
                                + '\n')
print("Saved data to: results/results12.dat")
# File to save: outputFile26
# Convert each recorded NEURON Vector from mV to volts as a plain Python
# list (native lists are much faster to index than hoc Vectors).
py_v_v_outputFile26 = [float(x / 1000.0) for x in h.v_v_outputFile26.to_python()]
py_v_v_outputFile26_ = [float(x / 1000.0) for x in h.v_v_outputFile26_.to_python()]
py_v_v_outputFile26__ = [float(x / 1000.0) for x in h.v_v_outputFile26__.to_python()]
py_v_v_outputFile26___ = [float(x / 1000.0) for x in h.v_v_outputFile26___.to_python()]
py_v_v_outputFile26____ = [float(x / 1000.0) for x in h.v_v_outputFile26____.to_python()]
py_v_v_outputFile26_____ = [float(x / 1000.0) for x in h.v_v_outputFile26_____.to_python()]
py_v_v_outputFile26______ = [float(x / 1000.0) for x in h.v_v_outputFile26______.to_python()]
py_v_v_outputFile26_______ = [float(x / 1000.0) for x in h.v_v_outputFile26_______.to_python()]
py_v_v_outputFile26________ = [float(x / 1000.0) for x in h.v_v_outputFile26________.to_python()]
py_v_v_outputFile26_________ = [float(x / 1000.0) for x in h.v_v_outputFile26_________.to_python()]
# Voltage columns in the order they are written after the time column.
_cols_outputFile26 = [
    py_v_v_outputFile26, py_v_v_outputFile26_, py_v_v_outputFile26__,
    py_v_v_outputFile26___, py_v_v_outputFile26____, py_v_v_outputFile26_____,
    py_v_v_outputFile26______, py_v_v_outputFile26_______,
    py_v_v_outputFile26________, py_v_v_outputFile26_________,
]
# Fix: use a context manager so the file is closed even if writing raises,
# and assemble each tab-separated row with join() instead of long '+' chains.
with open('results/results26.dat', 'w') as f_outputFile26_f2:
    for i in range(int(h.tstop * h.steps_per_ms) + 1):
        f_outputFile26_f2.write('%e\t' % py_v_time[i]
                                + ''.join('%e\t' % col[i] for col in _cols_outputFile26)
                                + '\n')
print("Saved data to: results/results26.dat")
# File to save: outputFile13
# Convert each recorded NEURON Vector from mV to volts as a plain Python
# list (native lists are much faster to index than hoc Vectors).
py_v_v_outputFile13 = [float(x / 1000.0) for x in h.v_v_outputFile13.to_python()]
py_v_v_outputFile13_ = [float(x / 1000.0) for x in h.v_v_outputFile13_.to_python()]
py_v_v_outputFile13__ = [float(x / 1000.0) for x in h.v_v_outputFile13__.to_python()]
py_v_v_outputFile13___ = [float(x / 1000.0) for x in h.v_v_outputFile13___.to_python()]
py_v_v_outputFile13____ = [float(x / 1000.0) for x in h.v_v_outputFile13____.to_python()]
py_v_v_outputFile13_____ = [float(x / 1000.0) for x in h.v_v_outputFile13_____.to_python()]
py_v_v_outputFile13______ = [float(x / 1000.0) for x in h.v_v_outputFile13______.to_python()]
py_v_v_outputFile13_______ = [float(x / 1000.0) for x in h.v_v_outputFile13_______.to_python()]
py_v_v_outputFile13________ = [float(x / 1000.0) for x in h.v_v_outputFile13________.to_python()]
py_v_v_outputFile13_________ = [float(x / 1000.0) for x in h.v_v_outputFile13_________.to_python()]
# Voltage columns in the order they are written after the time column.
_cols_outputFile13 = [
    py_v_v_outputFile13, py_v_v_outputFile13_, py_v_v_outputFile13__,
    py_v_v_outputFile13___, py_v_v_outputFile13____, py_v_v_outputFile13_____,
    py_v_v_outputFile13______, py_v_v_outputFile13_______,
    py_v_v_outputFile13________, py_v_v_outputFile13_________,
]
# Fix: use a context manager so the file is closed even if writing raises,
# and assemble each tab-separated row with join() instead of long '+' chains.
with open('results/results13.dat', 'w') as f_outputFile13_f2:
    for i in range(int(h.tstop * h.steps_per_ms) + 1):
        f_outputFile13_f2.write('%e\t' % py_v_time[i]
                                + ''.join('%e\t' % col[i] for col in _cols_outputFile13)
                                + '\n')
print("Saved data to: results/results13.dat")
# File to save: outputFile27
# Convert each recorded NEURON Vector from mV to volts as a plain Python
# list (native lists are much faster to index than hoc Vectors).
py_v_v_outputFile27 = [float(x / 1000.0) for x in h.v_v_outputFile27.to_python()]
py_v_v_outputFile27_ = [float(x / 1000.0) for x in h.v_v_outputFile27_.to_python()]
py_v_v_outputFile27__ = [float(x / 1000.0) for x in h.v_v_outputFile27__.to_python()]
py_v_v_outputFile27___ = [float(x / 1000.0) for x in h.v_v_outputFile27___.to_python()]
py_v_v_outputFile27____ = [float(x / 1000.0) for x in h.v_v_outputFile27____.to_python()]
py_v_v_outputFile27_____ = [float(x / 1000.0) for x in h.v_v_outputFile27_____.to_python()]
py_v_v_outputFile27______ = [float(x / 1000.0) for x in h.v_v_outputFile27______.to_python()]
py_v_v_outputFile27_______ = [float(x / 1000.0) for x in h.v_v_outputFile27_______.to_python()]
py_v_v_outputFile27________ = [float(x / 1000.0) for x in h.v_v_outputFile27________.to_python()]
py_v_v_outputFile27_________ = [float(x / 1000.0) for x in h.v_v_outputFile27_________.to_python()]
# Voltage columns in the order they are written after the time column.
_cols_outputFile27 = [
    py_v_v_outputFile27, py_v_v_outputFile27_, py_v_v_outputFile27__,
    py_v_v_outputFile27___, py_v_v_outputFile27____, py_v_v_outputFile27_____,
    py_v_v_outputFile27______, py_v_v_outputFile27_______,
    py_v_v_outputFile27________, py_v_v_outputFile27_________,
]
# Fix: use a context manager so the file is closed even if writing raises,
# and assemble each tab-separated row with join() instead of long '+' chains.
with open('results/results27.dat', 'w') as f_outputFile27_f2:
    for i in range(int(h.tstop * h.steps_per_ms) + 1):
        f_outputFile27_f2.write('%e\t' % py_v_time[i]
                                + ''.join('%e\t' % col[i] for col in _cols_outputFile27)
                                + '\n')
print("Saved data to: results/results27.dat")
# File to save: outputFile10
# Convert each recorded NEURON Vector from mV to volts as a plain Python
# list (native lists are much faster to index than hoc Vectors).
py_v_v_outputFile10 = [float(x / 1000.0) for x in h.v_v_outputFile10.to_python()]
py_v_v_outputFile10_ = [float(x / 1000.0) for x in h.v_v_outputFile10_.to_python()]
py_v_v_outputFile10__ = [float(x / 1000.0) for x in h.v_v_outputFile10__.to_python()]
py_v_v_outputFile10___ = [float(x / 1000.0) for x in h.v_v_outputFile10___.to_python()]
py_v_v_outputFile10____ = [float(x / 1000.0) for x in h.v_v_outputFile10____.to_python()]
py_v_v_outputFile10_____ = [float(x / 1000.0) for x in h.v_v_outputFile10_____.to_python()]
py_v_v_outputFile10______ = [float(x / 1000.0) for x in h.v_v_outputFile10______.to_python()]
py_v_v_outputFile10_______ = [float(x / 1000.0) for x in h.v_v_outputFile10_______.to_python()]
py_v_v_outputFile10________ = [float(x / 1000.0) for x in h.v_v_outputFile10________.to_python()]
py_v_v_outputFile10_________ = [float(x / 1000.0) for x in h.v_v_outputFile10_________.to_python()]
# Voltage columns in the order they are written after the time column.
_cols_outputFile10 = [
    py_v_v_outputFile10, py_v_v_outputFile10_, py_v_v_outputFile10__,
    py_v_v_outputFile10___, py_v_v_outputFile10____, py_v_v_outputFile10_____,
    py_v_v_outputFile10______, py_v_v_outputFile10_______,
    py_v_v_outputFile10________, py_v_v_outputFile10_________,
]
# Fix: use a context manager so the file is closed even if writing raises,
# and assemble each tab-separated row with join() instead of long '+' chains.
with open('results/results10.dat', 'w') as f_outputFile10_f2:
    for i in range(int(h.tstop * h.steps_per_ms) + 1):
        f_outputFile10_f2.write('%e\t' % py_v_time[i]
                                + ''.join('%e\t' % col[i] for col in _cols_outputFile10)
                                + '\n')
print("Saved data to: results/results10.dat")
# File to save: outputFile28
# Convert each recorded NEURON Vector from mV to volts as a plain Python
# list (native lists are much faster to index than hoc Vectors).
py_v_v_outputFile28 = [float(x / 1000.0) for x in h.v_v_outputFile28.to_python()]
py_v_v_outputFile28_ = [float(x / 1000.0) for x in h.v_v_outputFile28_.to_python()]
py_v_v_outputFile28__ = [float(x / 1000.0) for x in h.v_v_outputFile28__.to_python()]
py_v_v_outputFile28___ = [float(x / 1000.0) for x in h.v_v_outputFile28___.to_python()]
py_v_v_outputFile28____ = [float(x / 1000.0) for x in h.v_v_outputFile28____.to_python()]
py_v_v_outputFile28_____ = [float(x / 1000.0) for x in h.v_v_outputFile28_____.to_python()]
py_v_v_outputFile28______ = [float(x / 1000.0) for x in h.v_v_outputFile28______.to_python()]
py_v_v_outputFile28_______ = [float(x / 1000.0) for x in h.v_v_outputFile28_______.to_python()]
py_v_v_outputFile28________ = [float(x / 1000.0) for x in h.v_v_outputFile28________.to_python()]
py_v_v_outputFile28_________ = [float(x / 1000.0) for x in h.v_v_outputFile28_________.to_python()]
# Voltage columns in the order they are written after the time column.
_cols_outputFile28 = [
    py_v_v_outputFile28, py_v_v_outputFile28_, py_v_v_outputFile28__,
    py_v_v_outputFile28___, py_v_v_outputFile28____, py_v_v_outputFile28_____,
    py_v_v_outputFile28______, py_v_v_outputFile28_______,
    py_v_v_outputFile28________, py_v_v_outputFile28_________,
]
# Fix: use a context manager so the file is closed even if writing raises,
# and assemble each tab-separated row with join() instead of long '+' chains.
with open('results/results28.dat', 'w') as f_outputFile28_f2:
    for i in range(int(h.tstop * h.steps_per_ms) + 1):
        f_outputFile28_f2.write('%e\t' % py_v_time[i]
                                + ''.join('%e\t' % col[i] for col in _cols_outputFile28)
                                + '\n')
print("Saved data to: results/results28.dat")
# File to save: outputFile11
# Convert each recorded NEURON Vector from mV to volts as a plain Python
# list (native lists are much faster to index than hoc Vectors).
py_v_v_outputFile11 = [float(x / 1000.0) for x in h.v_v_outputFile11.to_python()]
py_v_v_outputFile11_ = [float(x / 1000.0) for x in h.v_v_outputFile11_.to_python()]
py_v_v_outputFile11__ = [float(x / 1000.0) for x in h.v_v_outputFile11__.to_python()]
py_v_v_outputFile11___ = [float(x / 1000.0) for x in h.v_v_outputFile11___.to_python()]
py_v_v_outputFile11____ = [float(x / 1000.0) for x in h.v_v_outputFile11____.to_python()]
py_v_v_outputFile11_____ = [float(x / 1000.0) for x in h.v_v_outputFile11_____.to_python()]
py_v_v_outputFile11______ = [float(x / 1000.0) for x in h.v_v_outputFile11______.to_python()]
py_v_v_outputFile11_______ = [float(x / 1000.0) for x in h.v_v_outputFile11_______.to_python()]
py_v_v_outputFile11________ = [float(x / 1000.0) for x in h.v_v_outputFile11________.to_python()]
py_v_v_outputFile11_________ = [float(x / 1000.0) for x in h.v_v_outputFile11_________.to_python()]
# Voltage columns in the order they are written after the time column.
_cols_outputFile11 = [
    py_v_v_outputFile11, py_v_v_outputFile11_, py_v_v_outputFile11__,
    py_v_v_outputFile11___, py_v_v_outputFile11____, py_v_v_outputFile11_____,
    py_v_v_outputFile11______, py_v_v_outputFile11_______,
    py_v_v_outputFile11________, py_v_v_outputFile11_________,
]
# Fix: use a context manager so the file is closed even if writing raises,
# and assemble each tab-separated row with join() instead of long '+' chains.
with open('results/results11.dat', 'w') as f_outputFile11_f2:
    for i in range(int(h.tstop * h.steps_per_ms) + 1):
        f_outputFile11_f2.write('%e\t' % py_v_time[i]
                                + ''.join('%e\t' % col[i] for col in _cols_outputFile11)
                                + '\n')
print("Saved data to: results/results11.dat")
# File to save: outputFile29
# Convert each recorded NEURON Vector from mV to volts as a plain Python
# list (native lists are much faster to index than hoc Vectors).
py_v_v_outputFile29 = [float(x / 1000.0) for x in h.v_v_outputFile29.to_python()]
py_v_v_outputFile29_ = [float(x / 1000.0) for x in h.v_v_outputFile29_.to_python()]
py_v_v_outputFile29__ = [float(x / 1000.0) for x in h.v_v_outputFile29__.to_python()]
py_v_v_outputFile29___ = [float(x / 1000.0) for x in h.v_v_outputFile29___.to_python()]
py_v_v_outputFile29____ = [float(x / 1000.0) for x in h.v_v_outputFile29____.to_python()]
py_v_v_outputFile29_____ = [float(x / 1000.0) for x in h.v_v_outputFile29_____.to_python()]
py_v_v_outputFile29______ = [float(x / 1000.0) for x in h.v_v_outputFile29______.to_python()]
py_v_v_outputFile29_______ = [float(x / 1000.0) for x in h.v_v_outputFile29_______.to_python()]
py_v_v_outputFile29________ = [float(x / 1000.0) for x in h.v_v_outputFile29________.to_python()]
py_v_v_outputFile29_________ = [float(x / 1000.0) for x in h.v_v_outputFile29_________.to_python()]
# Voltage columns in the order they are written after the time column.
_cols_outputFile29 = [
    py_v_v_outputFile29, py_v_v_outputFile29_, py_v_v_outputFile29__,
    py_v_v_outputFile29___, py_v_v_outputFile29____, py_v_v_outputFile29_____,
    py_v_v_outputFile29______, py_v_v_outputFile29_______,
    py_v_v_outputFile29________, py_v_v_outputFile29_________,
]
# Fix: use a context manager so the file is closed even if writing raises,
# and assemble each tab-separated row with join() instead of long '+' chains.
with open('results/results29.dat', 'w') as f_outputFile29_f2:
    for i in range(int(h.tstop * h.steps_per_ms) + 1):
        f_outputFile29_f2.write('%e\t' % py_v_time[i]
                                + ''.join('%e\t' % col[i] for col in _cols_outputFile29)
                                + '\n')
print("Saved data to: results/results29.dat")
# File to save: outputFile31
py_v_v_outputFile31 = [ float(x / 1000.0) for x in h.v_v_outputFile31.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile31_ = [ float(x / 1000.0) for x in h.v_v_outputFile31_.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile31__ = [ float(x / 1000.0) for x in h.v_v_outputFile31__.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile31___ = [ float(x / 1000.0) for x in h.v_v_outputFile31___.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile31____ = [ float(x / 1000.0) for x in h.v_v_outputFile31____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile31_____ = [ float(x / 1000.0) for x in h.v_v_outputFile31_____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile31______ = [ float(x / 1000.0) for x in h.v_v_outputFile31______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile31_______ = [ float(x / 1000.0) for x in h.v_v_outputFile31_______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile31________ = [ float(x / 1000.0) for x in h.v_v_outputFile31________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile31_________ = [ float(x / 1000.0) for x in h.v_v_outputFile31_________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
f_outputFile31_f2 = open('results/results31.dat', 'w')
for i in range(int(h.tstop * h.steps_per_ms) + 1):
f_outputFile31_f2.write('%e\t'% py_v_time[i] + '%e\t'%(py_v_v_outputFile31[i]) + '%e\t'%(py_v_v_outputFile31_[i]) + '%e\t'%(py_v_v_outputFile31__[i]) + '%e\t'%(py_v_v_outputFile31___[i]) + '%e\t'%(py_v_v_outputFile31____[i]) + '%e\t'%(py_v_v_outputFile31_____[i]) + '%e\t'%(py_v_v_outputFile31______[i]) + '%e\t'%(py_v_v_outputFile31_______[i]) + '%e\t'%(py_v_v_outputFile31________[i]) + '%e\t'%(py_v_v_outputFile31_________[i]) + '\n')
f_outputFile31_f2.close()
print("Saved data to: results/results31.dat")
# File to save: outputFile30
py_v_v_outputFile30 = [ float(x / 1000.0) for x in h.v_v_outputFile30.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile30_ = [ float(x / 1000.0) for x in h.v_v_outputFile30_.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile30__ = [ float(x / 1000.0) for x in h.v_v_outputFile30__.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile30___ = [ float(x / 1000.0) for x in h.v_v_outputFile30___.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile30____ = [ float(x / 1000.0) for x in h.v_v_outputFile30____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile30_____ = [ float(x / 1000.0) for x in h.v_v_outputFile30_____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile30______ = [ float(x / 1000.0) for x in h.v_v_outputFile30______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile30_______ = [ float(x / 1000.0) for x in h.v_v_outputFile30_______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile30________ = [ float(x / 1000.0) for x in h.v_v_outputFile30________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile30_________ = [ float(x / 1000.0) for x in h.v_v_outputFile30_________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
f_outputFile30_f2 = open('results/results30.dat', 'w')
for i in range(int(h.tstop * h.steps_per_ms) + 1):
f_outputFile30_f2.write('%e\t'% py_v_time[i] + '%e\t'%(py_v_v_outputFile30[i]) + '%e\t'%(py_v_v_outputFile30_[i]) + '%e\t'%(py_v_v_outputFile30__[i]) + '%e\t'%(py_v_v_outputFile30___[i]) + '%e\t'%(py_v_v_outputFile30____[i]) + '%e\t'%(py_v_v_outputFile30_____[i]) + '%e\t'%(py_v_v_outputFile30______[i]) + '%e\t'%(py_v_v_outputFile30_______[i]) + '%e\t'%(py_v_v_outputFile30________[i]) + '%e\t'%(py_v_v_outputFile30_________[i]) + '\n')
f_outputFile30_f2.close()
print("Saved data to: results/results30.dat")
# File to save: outputFile33
py_v_v_outputFile33 = [ float(x / 1000.0) for x in h.v_v_outputFile33.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile33_ = [ float(x / 1000.0) for x in h.v_v_outputFile33_.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile33__ = [ float(x / 1000.0) for x in h.v_v_outputFile33__.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile33___ = [ float(x / 1000.0) for x in h.v_v_outputFile33___.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile33____ = [ float(x / 1000.0) for x in h.v_v_outputFile33____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile33_____ = [ float(x / 1000.0) for x in h.v_v_outputFile33_____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile33______ = [ float(x / 1000.0) for x in h.v_v_outputFile33______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile33_______ = [ float(x / 1000.0) for x in h.v_v_outputFile33_______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile33________ = [ float(x / 1000.0) for x in h.v_v_outputFile33________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile33_________ = [ float(x / 1000.0) for x in h.v_v_outputFile33_________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
f_outputFile33_f2 = open('results/results33.dat', 'w')
for i in range(int(h.tstop * h.steps_per_ms) + 1):
f_outputFile33_f2.write('%e\t'% py_v_time[i] + '%e\t'%(py_v_v_outputFile33[i]) + '%e\t'%(py_v_v_outputFile33_[i]) + '%e\t'%(py_v_v_outputFile33__[i]) + '%e\t'%(py_v_v_outputFile33___[i]) + '%e\t'%(py_v_v_outputFile33____[i]) + '%e\t'%(py_v_v_outputFile33_____[i]) + '%e\t'%(py_v_v_outputFile33______[i]) + '%e\t'%(py_v_v_outputFile33_______[i]) + '%e\t'%(py_v_v_outputFile33________[i]) + '%e\t'%(py_v_v_outputFile33_________[i]) + '\n')
f_outputFile33_f2.close()
print("Saved data to: results/results33.dat")
# File to save: outputFile32
py_v_v_outputFile32 = [ float(x / 1000.0) for x in h.v_v_outputFile32.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile32_ = [ float(x / 1000.0) for x in h.v_v_outputFile32_.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile32__ = [ float(x / 1000.0) for x in h.v_v_outputFile32__.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile32___ = [ float(x / 1000.0) for x in h.v_v_outputFile32___.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile32____ = [ float(x / 1000.0) for x in h.v_v_outputFile32____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile32_____ = [ float(x / 1000.0) for x in h.v_v_outputFile32_____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile32______ = [ float(x / 1000.0) for x in h.v_v_outputFile32______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile32_______ = [ float(x / 1000.0) for x in h.v_v_outputFile32_______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile32________ = [ float(x / 1000.0) for x in h.v_v_outputFile32________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile32_________ = [ float(x / 1000.0) for x in h.v_v_outputFile32_________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
f_outputFile32_f2 = open('results/results32.dat', 'w')
for i in range(int(h.tstop * h.steps_per_ms) + 1):
f_outputFile32_f2.write('%e\t'% py_v_time[i] + '%e\t'%(py_v_v_outputFile32[i]) + '%e\t'%(py_v_v_outputFile32_[i]) + '%e\t'%(py_v_v_outputFile32__[i]) + '%e\t'%(py_v_v_outputFile32___[i]) + '%e\t'%(py_v_v_outputFile32____[i]) + '%e\t'%(py_v_v_outputFile32_____[i]) + '%e\t'%(py_v_v_outputFile32______[i]) + '%e\t'%(py_v_v_outputFile32_______[i]) + '%e\t'%(py_v_v_outputFile32________[i]) + '%e\t'%(py_v_v_outputFile32_________[i]) + '\n')
f_outputFile32_f2.close()
print("Saved data to: results/results32.dat")
# File to save: outputFile35
py_v_v_outputFile35 = [ float(x / 1000.0) for x in h.v_v_outputFile35.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile35_ = [ float(x / 1000.0) for x in h.v_v_outputFile35_.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile35__ = [ float(x / 1000.0) for x in h.v_v_outputFile35__.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile35___ = [ float(x / 1000.0) for x in h.v_v_outputFile35___.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile35____ = [ float(x / 1000.0) for x in h.v_v_outputFile35____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile35_____ = [ float(x / 1000.0) for x in h.v_v_outputFile35_____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile35______ = [ float(x / 1000.0) for x in h.v_v_outputFile35______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile35_______ = [ float(x / 1000.0) for x in h.v_v_outputFile35_______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile35________ = [ float(x / 1000.0) for x in h.v_v_outputFile35________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile35_________ = [ float(x / 1000.0) for x in h.v_v_outputFile35_________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
f_outputFile35_f2 = open('results/results35.dat', 'w')
for i in range(int(h.tstop * h.steps_per_ms) + 1):
f_outputFile35_f2.write('%e\t'% py_v_time[i] + '%e\t'%(py_v_v_outputFile35[i]) + '%e\t'%(py_v_v_outputFile35_[i]) + '%e\t'%(py_v_v_outputFile35__[i]) + '%e\t'%(py_v_v_outputFile35___[i]) + '%e\t'%(py_v_v_outputFile35____[i]) + '%e\t'%(py_v_v_outputFile35_____[i]) + '%e\t'%(py_v_v_outputFile35______[i]) + '%e\t'%(py_v_v_outputFile35_______[i]) + '%e\t'%(py_v_v_outputFile35________[i]) + '%e\t'%(py_v_v_outputFile35_________[i]) + '\n')
f_outputFile35_f2.close()
print("Saved data to: results/results35.dat")
# File to save: outputFile34
py_v_v_outputFile34 = [ float(x / 1000.0) for x in h.v_v_outputFile34.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile34_ = [ float(x / 1000.0) for x in h.v_v_outputFile34_.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile34__ = [ float(x / 1000.0) for x in h.v_v_outputFile34__.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile34___ = [ float(x / 1000.0) for x in h.v_v_outputFile34___.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile34____ = [ float(x / 1000.0) for x in h.v_v_outputFile34____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile34_____ = [ float(x / 1000.0) for x in h.v_v_outputFile34_____.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile34______ = [ float(x / 1000.0) for x in h.v_v_outputFile34______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile34_______ = [ float(x / 1000.0) for x in h.v_v_outputFile34_______.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile34________ = [ float(x / 1000.0) for x in h.v_v_outputFile34________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
py_v_v_outputFile34_________ = [ float(x / 1000.0) for x in h.v_v_outputFile34_________.to_python() ] # Convert to Python list for speed, variable has dim: voltage
f_outputFile34_f2 = open('results/results34.dat', 'w')
for i in range(int(h.tstop * h.steps_per_ms) + 1):
f_outputFile34_f2.write('%e\t'% py_v_time[i] + '%e\t'%(py_v_v_outputFile34[i]) + '%e\t'%(py_v_v_outputFile34_[i]) + '%e\t'%(py_v_v_outputFile34__[i]) + '%e\t'%(py_v_v_outputFile34___[i]) + '%e\t'%(py_v_v_outputFile34____[i]) + '%e\t'%(py_v_v_outputFile34_____[i]) + '%e\t'%(py_v_v_outputFile34______[i]) + '%e\t'%(py_v_v_outputFile34_______[i]) + '%e\t'%(py_v_v_outputFile34________[i]) + '%e\t'%(py_v_v_outputFile34_________[i]) + '\n')
f_outputFile34_f2.close()
print("Saved data to: results/results34.dat")
# Report how long saving the result files took.  `sim_end` was recorded
# earlier (when the simulation itself finished), so the delta below is the
# pure file-writing time.
save_end = time.time()
save_time = save_end - sim_end  # seconds spent writing result files
print("Finished saving results in %f seconds"%(save_time))
print("Done")
# Terminate the interpreter; nothing after this script should run.
quit()
| 71.019475
| 451
| 0.738704
| 100,014
| 525,118
| 3.441928
| 0.037115
| 0.02324
| 0.091843
| 0.053805
| 0.858547
| 0.83886
| 0.821811
| 0.767401
| 0.739255
| 0.696503
| 0
| 0.159656
| 0.095451
| 525,118
| 7,393
| 452
| 71.029082
| 0.565069
| 0.205428
| 0
| 0.01014
| 0
| 0.213376
| 0.710498
| 0.512987
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.000432
| 0
| 0.000432
| 0.01165
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
b0bf807fb007754900ae316059c2c6a8e9ea7e6e
| 386
|
py
|
Python
|
Sums.py
|
Taekkjin/Taekk
|
e1479c14ccb778c5690c1f93f364fad5395cbf41
|
[
"MIT"
] | null | null | null |
Sums.py
|
Taekkjin/Taekk
|
e1479c14ccb778c5690c1f93f364fad5395cbf41
|
[
"MIT"
] | null | null | null |
Sums.py
|
Taekkjin/Taekk
|
e1479c14ccb778c5690c1f93f364fad5395cbf41
|
[
"MIT"
] | null | null | null |
# Small summation exercises; each prints its total followed by a blank line.

# Count the integers 1..5 (yields 5).
n = sum(1 for _ in range(1, 6))
print(n)
print('\n')

# Count the integers 1..10 (yields 10).
n = sum(1 for _ in range(1, 11))
print(n)
print('\n')

# Sum of squares k^2 for k = 0..5.
n = sum(k ** 2 for k in range(6))
print(n)
print('\n')

# Sum of cubes k^3 for k = 0..5.
n = sum(k ** 3 for k in range(6))
print(n)
print('\n')

# Sum of shifted squares (k - 1)^2 for k = 0..5.
n = sum((k - 1) ** 2 for k in range(6))
print(n)
print('\n')

# Iterated map n -> k * (n - 1) / 2 applied for k = 0..5, starting from 1.
# (Not a plain sum, so it stays an explicit loop.)
n = 1
for k in range(6):
    n = k * (n - 1) / 2
print(n)
print('\n')
| 9.414634
| 21
| 0.484456
| 96
| 386
| 1.947917
| 0.125
| 0.385027
| 0.192513
| 0.352941
| 0.962567
| 0.962567
| 0.802139
| 0.647059
| 0.647059
| 0.647059
| 0
| 0.09507
| 0.264249
| 386
| 41
| 22
| 9.414634
| 0.56338
| 0
| 0
| 0.766667
| 0
| 0
| 0.031008
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.4
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
b0c8494c7a1d2a3f44e2e174f489619d55c35da6
| 1,571
|
py
|
Python
|
colour/appearance/__init__.py
|
BPearlstine/colour
|
40f0281295496774d2a19eee017d50fd0c265bd8
|
[
"Cube",
"BSD-3-Clause"
] | 2
|
2020-05-03T20:15:42.000Z
|
2021-04-09T18:19:06.000Z
|
colour/appearance/__init__.py
|
BPearlstine/colour
|
40f0281295496774d2a19eee017d50fd0c265bd8
|
[
"Cube",
"BSD-3-Clause"
] | null | null | null |
colour/appearance/__init__.py
|
BPearlstine/colour
|
40f0281295496774d2a19eee017d50fd0c265bd8
|
[
"Cube",
"BSD-3-Clause"
] | 1
|
2019-12-11T19:48:27.000Z
|
2019-12-11T19:48:27.000Z
|
# -*- coding: utf-8 -*-
"""
Colour appearance model (CAM) public API.

Re-exports, for each colour appearance model visible below (Hunt, ATD (1995),
CIECAM02, CAM16, LLAB, Nayatani (1995) and RLAB), its specification class,
viewing-condition presets / induction factors and XYZ conversion functions,
and builds ``__all__`` incrementally, one model group at a time.
"""
from __future__ import absolute_import
from .hunt import (Hunt_InductionFactors, HUNT_VIEWING_CONDITIONS,
                   Hunt_Specification, XYZ_to_Hunt)
from .atd95 import ATD95_Specification, XYZ_to_ATD95
from .ciecam02 import (CIECAM02_InductionFactors, CIECAM02_VIEWING_CONDITIONS,
                       CIECAM02_Specification, XYZ_to_CIECAM02,
                       CIECAM02_to_XYZ)
from .cam16 import (CAM16_InductionFactors, CAM16_VIEWING_CONDITIONS,
                    CAM16_Specification, XYZ_to_CAM16, CAM16_to_XYZ)
from .llab import (LLAB_InductionFactors, LLAB_VIEWING_CONDITIONS,
                   LLAB_Specification, XYZ_to_LLAB)
from .nayatani95 import Nayatani95_Specification, XYZ_to_Nayatani95
from .rlab import (RLAB_VIEWING_CONDITIONS, RLAB_D_FACTOR, RLAB_Specification,
                   XYZ_to_RLAB)

# Hunt model.
__all__ = [
    'Hunt_InductionFactors', 'HUNT_VIEWING_CONDITIONS', 'Hunt_Specification',
    'XYZ_to_Hunt'
]
# ATD (1995) model.
__all__ += ['ATD95_Specification', 'XYZ_to_ATD95']
# CIECAM02 model (forward and inverse).
__all__ += [
    'CIECAM02_InductionFactors', 'CIECAM02_VIEWING_CONDITIONS',
    'CIECAM02_Specification', 'XYZ_to_CIECAM02', 'CIECAM02_to_XYZ'
]
# CAM16 model (forward and inverse).
__all__ += [
    'CAM16_InductionFactors', 'CAM16_VIEWING_CONDITIONS',
    'CAM16_Specification', 'XYZ_to_CAM16', 'CAM16_to_XYZ'
]
# LLAB model.
__all__ += [
    'LLAB_InductionFactors', 'LLAB_VIEWING_CONDITIONS', 'LLAB_Specification',
    'XYZ_to_LLAB'
]
# Nayatani (1995) model.
__all__ += ['Nayatani95_Specification', 'XYZ_to_Nayatani95']
# RLAB model.
__all__ += [
    'RLAB_VIEWING_CONDITIONS', 'RLAB_D_FACTOR', 'RLAB_Specification',
    'XYZ_to_RLAB'
]
| 38.317073
| 78
| 0.735837
| 170
| 1,571
| 6.123529
| 0.135294
| 0.215178
| 0.242075
| 0.059558
| 0.835735
| 0.708934
| 0.708934
| 0.708934
| 0.708934
| 0.708934
| 0
| 0.050038
| 0.173138
| 1,571
| 40
| 79
| 39.275
| 0.751347
| 0.013367
| 0
| 0.111111
| 0
| 0
| 0.307494
| 0.164729
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.222222
| 0
| 0.222222
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b0d64d3f3dbb7eff9cf0c3b7454cf36632aa09be
| 14,547
|
py
|
Python
|
src/bibim/expr_func.py
|
rishubil/pybibim
|
3f29207feab250e87b00a97306deb562f8ca3e46
|
[
"MIT"
] | 1
|
2015-08-24T10:51:07.000Z
|
2015-08-24T10:51:07.000Z
|
src/bibim/expr_func.py
|
rishubil/pybibim
|
3f29207feab250e87b00a97306deb562f8ca3e46
|
[
"MIT"
] | 9
|
2016-07-19T17:21:13.000Z
|
2017-01-22T05:43:00.000Z
|
src/bibim/expr_func.py
|
bibim-lang/pybibim
|
3f29207feab250e87b00a97306deb562f8ca3e46
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from . import datatype
from .utils import safe_get_value, safe_get_evaled_expr
class Func:
    """Defines the function that runs when an Expr is evaluated.

    Concrete behaviour is supplied by subclasses; this base evaluates
    to null and identifies itself generically in logs.
    """

    def log_string(self):
        return "Func"

    def log_expr(self):
        return "F"

    def call(self):
        """Run when the Expr is evaluated.

        :return: the evaluated result
        :rtype: datatype.ValueExpr
        """
        return datatype.NULL_EXPR_INST
class FuncBowl(Func):
    """Implements the bowl operator ':'."""

    def __init__(self, bowl=None, nn=None):
        """Create a FuncBowl.

        :param bowl: Bowl instance to fetch the Expr from
        :type bowl: Bowl
        :param nn: noodle number within the bowl
        :type nn: Number
        """
        self.bowl = bowl
        self.nn = nn

    def log_string(self):
        return "FuncBowl(%s, %s)" % (self.bowl.log_string(),
                                     self.nn.log_string())

    def log_expr(self):
        return "%s:%s" % (self.bowl.log_expr(), self.nn.log_expr())

    def call(self):
        """Run when the Expr is evaluated.

        :return: the evaluated result
        :rtype: datatype.ValueExpr
        """
        target_bowl = safe_get_value(self.bowl, datatype.Bowl)
        if target_bowl is datatype.NULL_INST:
            return datatype.NULL_EXPR_INST
        noodle_number = safe_get_value(self.nn, datatype.Number)
        if noodle_number is datatype.NULL_INST:
            return datatype.NULL_EXPR_INST
        try:
            return safe_get_evaled_expr(
                target_bowl.get_noodle(noodle_number).expr())
        except KeyError:
            # No noodle at that number: evaluate to null.
            return datatype.NULL_EXPR_INST
class FuncAssign(Func):
    """Implements the assign operator '='."""

    def __init__(self, bowl=None, nn=None, value_expr=None):
        """Create a FuncAssign.

        When *bowl* is None, datatype.ValueExpr(datatype.MEM) is used as
        the default target.

        :param bowl: assignment target
        :type bowl: Bowl
        :param nn: noodle number within the bowl
        :type nn: Number
        :param value_expr: ValueExpr holding the Value to assign
        :type value_expr: ValueExpr
        """
        if bowl is None:
            bowl = datatype.ValueExpr(datatype.MEM)
        self.bowl = bowl
        self.nn = nn
        self.value_expr = value_expr

    def log_string(self):
        return "FuncAssign(%s, %s, %s)" % (self.bowl.log_string(),
                                           self.nn.log_string(),
                                           self.value_expr.log_string())

    def log_expr(self):
        return "%s:%s=%s" % (self.bowl.log_expr(), self.nn.log_expr(),
                             self.value_expr.log_expr())

    def call(self):
        """Run when the Expr is evaluated.

        :return: the evaluated result
        :rtype: datatype.ValueExpr
        """
        target_bowl = safe_get_value(self.bowl, datatype.Bowl)
        if target_bowl is datatype.NULL_INST:
            return datatype.NULL_EXPR_INST
        noodle_number = safe_get_value(self.nn, datatype.Number)
        if noodle_number is datatype.NULL_INST:
            return datatype.NULL_EXPR_INST
        target_bowl.set_noodle(noodle_number,
                               safe_get_evaled_expr(self.value_expr))
        return datatype.NULL_EXPR_INST
class FuncDeno(Func):
    """Implements the denominator operator '^'."""

    def __init__(self, number=None):
        """Create a FuncDeno.

        :param number: number whose denominator is taken
        :type number: Number
        """
        self.number = number

    def log_string(self):
        return "FuncDeno(%s)" % (self.number.log_string(), )

    def log_expr(self):
        return "^%s" % (self.number.log_expr(), )

    def call(self):
        """Run when the Expr is evaluated.

        :return: the evaluated result
        :rtype: datatype.ValueExpr
        """
        operand = safe_get_value(self.number, datatype.Number)
        if operand is datatype.NULL_INST:
            return datatype.NULL_EXPR_INST
        return datatype.ValueExpr(operand.denominator_number())
class FuncPlus(Func):
    """Implements the add operator '+'."""

    def __init__(self, l_number=None, r_number=None):
        """Create a FuncPlus.

        :param l_number: number on the left-hand side of the operator
        :type l_number: Number
        :param r_number: number on the right-hand side of the operator
        :type r_number: Number
        """
        self.l_number = l_number
        self.r_number = r_number

    def log_string(self):
        return "FuncPlus(%s, %s)" % (self.l_number.log_string(),
                                     self.r_number.log_string())

    def log_expr(self):
        return "%s + %s" % (self.l_number.log_expr(),
                            self.r_number.log_expr())

    def call(self):
        """Run when the Expr is evaluated.

        :return: the evaluated result
        :rtype: datatype.ValueExpr
        """
        lhs = safe_get_value(self.l_number, datatype.Number)
        if lhs is datatype.NULL_INST:
            return datatype.NULL_EXPR_INST
        rhs = safe_get_value(self.r_number, datatype.Number)
        if rhs is datatype.NULL_INST:
            return datatype.NULL_EXPR_INST
        return datatype.ValueExpr(lhs.add(rhs))
class FuncMinus(Func):
    """Implements the subtract operator '-'."""

    def __init__(self, l_number=None, r_number=None):
        """Create a FuncMinus.

        :param l_number: number on the left-hand side of the operator
        :type l_number: Number
        :param r_number: number on the right-hand side of the operator
        :type r_number: Number
        """
        self.l_number = l_number
        self.r_number = r_number

    def log_string(self):
        return "FuncMinus(%s, %s)" % (self.l_number.log_string(),
                                      self.r_number.log_string())

    def log_expr(self):
        return "%s - %s" % (self.l_number.log_expr(),
                            self.r_number.log_expr())

    def call(self):
        """Run when the Expr is evaluated.

        :return: the evaluated result
        :rtype: datatype.ValueExpr
        """
        lhs = safe_get_value(self.l_number, datatype.Number)
        if lhs is datatype.NULL_INST:
            return datatype.NULL_EXPR_INST
        rhs = safe_get_value(self.r_number, datatype.Number)
        if rhs is datatype.NULL_INST:
            return datatype.NULL_EXPR_INST
        return datatype.ValueExpr(lhs.sub(rhs))
class FuncMul(Func):
    """Implements the multiply operator '*'."""

    def __init__(self, l_number=None, r_number=None):
        """Create a FuncMul.

        :param l_number: number on the left-hand side of the operator
        :type l_number: Number
        :param r_number: number on the right-hand side of the operator
        :type r_number: Number
        """
        self.l_number = l_number
        self.r_number = r_number

    def log_string(self):
        return "FuncMul(%s, %s)" % (self.l_number.log_string(),
                                    self.r_number.log_string())

    def log_expr(self):
        return "%s * %s" % (self.l_number.log_expr(),
                            self.r_number.log_expr())

    def call(self):
        """Run when the Expr is evaluated.

        :return: the evaluated result
        :rtype: datatype.ValueExpr
        """
        lhs = safe_get_value(self.l_number, datatype.Number)
        if lhs is datatype.NULL_INST:
            return datatype.NULL_EXPR_INST
        rhs = safe_get_value(self.r_number, datatype.Number)
        if rhs is datatype.NULL_INST:
            return datatype.NULL_EXPR_INST
        return datatype.ValueExpr(lhs.mul(rhs))
class FuncNumberSep(Func):
    """Implements the number separator '/'."""

    def __init__(self, l_number=None, r_number=None):
        """Create a FuncNumberSep.

        :param l_number: number on the left-hand side of the operator
        :type l_number: Number
        :param r_number: number on the right-hand side of the operator
        :type r_number: Number
        """
        self.l_number = l_number
        self.r_number = r_number

    def log_string(self):
        return "FuncNumberSep(%s, %s)" % (self.l_number.log_string(),
                                          self.r_number.log_string())

    def log_expr(self):
        # Note: no spaces around '/' in the rendered expression.
        return "%s/%s" % (self.l_number.log_expr(), self.r_number.log_expr())

    def call(self):
        """Run when the Expr is evaluated.

        :return: the evaluated result
        :rtype: datatype.ValueExpr
        """
        lhs = safe_get_value(self.l_number, datatype.Number)
        if lhs is datatype.NULL_INST:
            return datatype.NULL_EXPR_INST
        rhs = safe_get_value(self.r_number, datatype.Number)
        if rhs is datatype.NULL_INST:
            return datatype.NULL_EXPR_INST
        return datatype.ValueExpr(lhs.div(rhs))
class FuncAnd(Func):
    """Implements the and operator '&'."""

    def __init__(self, l_number=None, r_number=None):
        """Create a FuncAnd.

        :param l_number: number on the left-hand side of the operator
        :type l_number: Number
        :param r_number: number on the right-hand side of the operator
        :type r_number: Number
        """
        self.l_number = l_number
        self.r_number = r_number

    def log_string(self):
        return "FuncAnd(%s, %s)" % (self.l_number.log_string(),
                                    self.r_number.log_string())

    def log_expr(self):
        return "%s & %s" % (self.l_number.log_expr(),
                            self.r_number.log_expr())

    def call(self):
        """Run when the Expr is evaluated.

        :return: the evaluated result
        :rtype: datatype.ValueExpr
        """
        lhs = safe_get_value(self.l_number, datatype.Number)
        if lhs is datatype.NULL_INST:
            return datatype.NULL_EXPR_INST
        rhs = safe_get_value(self.r_number, datatype.Number)
        if rhs is datatype.NULL_INST:
            return datatype.NULL_EXPR_INST
        return datatype.ValueExpr(lhs._and(rhs))
class FuncOr(Func):
    """Implements the or operator '|'."""

    def __init__(self, l_number=None, r_number=None):
        """Create a FuncOr.

        :param l_number: number on the left-hand side of the operator
        :type l_number: Number
        :param r_number: number on the right-hand side of the operator
        :type r_number: Number
        """
        self.l_number = l_number
        self.r_number = r_number

    def log_string(self):
        return "FuncOr(%s, %s)" % (self.l_number.log_string(),
                                   self.r_number.log_string())

    def log_expr(self):
        return "%s | %s" % (self.l_number.log_expr(),
                            self.r_number.log_expr())

    def call(self):
        """Run when the Expr is evaluated.

        :return: the evaluated result
        :rtype: datatype.ValueExpr
        """
        lhs = safe_get_value(self.l_number, datatype.Number)
        if lhs is datatype.NULL_INST:
            return datatype.NULL_EXPR_INST
        rhs = safe_get_value(self.r_number, datatype.Number)
        if rhs is datatype.NULL_INST:
            return datatype.NULL_EXPR_INST
        return datatype.ValueExpr(lhs._or(rhs))
class FuncNot(Func):
    """Implements the not operator '!'."""

    def __init__(self, number=None):
        """Create a FuncNot.

        :param number: number to negate logically
        :type number: Number
        """
        self.number = number

    def log_string(self):
        return "FuncNot(%s)" % (self.number.log_string(), )

    def log_expr(self):
        return "!%s" % (self.number.log_expr(), )

    def call(self):
        """Run when the Expr is evaluated.

        :return: the evaluated result
        :rtype: datatype.ValueExpr
        """
        operand = safe_get_value(self.number, datatype.Number)
        if operand is datatype.NULL_INST:
            return datatype.NULL_EXPR_INST
        return datatype.ValueExpr(operand.not_f())
class FuncEq(Func):
    """Implements the equal operator '?='."""

    def __init__(self, l_number=None, r_number=None):
        """Create a FuncEq.

        :param l_number: number on the left-hand side of the operator
        :type l_number: Number
        :param r_number: number on the right-hand side of the operator
        :type r_number: Number
        """
        self.l_number = l_number
        self.r_number = r_number

    def log_string(self):
        return "FuncEq(%s, %s)" % (self.l_number.log_string(),
                                   self.r_number.log_string())

    def log_expr(self):
        return "%s ?= %s" % (self.l_number.log_expr(),
                             self.r_number.log_expr())

    def call(self):
        """Run when the Expr is evaluated.

        :return: the evaluated result
        :rtype: datatype.ValueExpr
        """
        lhs = safe_get_value(self.l_number, datatype.Number)
        if lhs is datatype.NULL_INST:
            return datatype.NULL_EXPR_INST
        rhs = safe_get_value(self.r_number, datatype.Number)
        if rhs is datatype.NULL_INST:
            return datatype.NULL_EXPR_INST
        return datatype.ValueExpr(lhs.eq_f(rhs))
class FuncGt(Func):
    """Implements the greater-than operator '>'."""

    def __init__(self, l_number=None, r_number=None):
        """Create a FuncGt.

        :param l_number: number on the left-hand side of the operator
        :type l_number: Number
        :param r_number: number on the right-hand side of the operator
        :type r_number: Number
        """
        self.l_number = l_number
        self.r_number = r_number

    def log_string(self):
        return "FuncGt(%s, %s)" % (self.l_number.log_string(),
                                   self.r_number.log_string())

    def log_expr(self):
        return "%s > %s" % (self.l_number.log_expr(),
                            self.r_number.log_expr())

    def call(self):
        """Run when the Expr is evaluated.

        :return: the evaluated result
        :rtype: datatype.ValueExpr
        """
        lhs = safe_get_value(self.l_number, datatype.Number)
        if lhs is datatype.NULL_INST:
            return datatype.NULL_EXPR_INST
        rhs = safe_get_value(self.r_number, datatype.Number)
        if rhs is datatype.NULL_INST:
            return datatype.NULL_EXPR_INST
        return datatype.ValueExpr(lhs.gt_f(rhs))
class FuncLt(Func):
    """Implements the less-than operator '<'."""

    def __init__(self, l_number=None, r_number=None):
        """Create a FuncLt.

        :param l_number: number on the left-hand side of the operator
        :type l_number: Number
        :param r_number: number on the right-hand side of the operator
        :type r_number: Number
        """
        self.l_number = l_number
        self.r_number = r_number

    def log_string(self):
        return "FuncLt(%s, %s)" % (self.l_number.log_string(),
                                   self.r_number.log_string())

    def log_expr(self):
        return "%s < %s" % (self.l_number.log_expr(),
                            self.r_number.log_expr())

    def call(self):
        """Run when the Expr is evaluated.

        :return: the evaluated result
        :rtype: datatype.ValueExpr
        """
        lhs = safe_get_value(self.l_number, datatype.Number)
        if lhs is datatype.NULL_INST:
            return datatype.NULL_EXPR_INST
        rhs = safe_get_value(self.r_number, datatype.Number)
        if rhs is datatype.NULL_INST:
            return datatype.NULL_EXPR_INST
        return datatype.ValueExpr(lhs.lt_f(rhs))
| 30.180498
| 118
| 0.616141
| 1,941
| 14,547
| 4.365791
| 0.068006
| 0.08178
| 0.058414
| 0.070097
| 0.850248
| 0.837031
| 0.829596
| 0.826174
| 0.826174
| 0.822634
| 0
| 0.000095
| 0.274421
| 14,547
| 481
| 119
| 30.243243
| 0.802748
| 0.23235
| 0
| 0.691244
| 0
| 0
| 0.02862
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.253456
| false
| 0
| 0.013825
| 0.129032
| 0.640553
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 8
|
b01e456fa2d32b2af6b0389a9e071f79e4dc144d
| 30,797
|
py
|
Python
|
routes/api/merchantApi.py
|
chojh04/BackOffice
|
d6a9d9be3689abfcb3e0a47b09b1589e74334ddc
|
[
"MIT"
] | null | null | null |
routes/api/merchantApi.py
|
chojh04/BackOffice
|
d6a9d9be3689abfcb3e0a47b09b1589e74334ddc
|
[
"MIT"
] | null | null | null |
routes/api/merchantApi.py
|
chojh04/BackOffice
|
d6a9d9be3689abfcb3e0a47b09b1589e74334ddc
|
[
"MIT"
] | null | null | null |
# -*- coding:utf-8 -*-
import json
from flask import Blueprint, request
from flask.globals import session
from util.common import paramEscape, getApiData, postApiData, getApiSingleData, \
strToLong, getParameter, getData, setStringToNumber, postData
from util.common import putApiData, deleteApiData, setNoneToBlank, \
request_get, request_post, request_put, API_SERVER_BACKOFFICE
from util.notification import KpcNotification
merchantApi = Blueprint("merchantApi", __name__)
@merchantApi.route("/api/totalMerchants", methods=['GET'])
def totalMerchants():
    """Dispatch a merchant search according to the requested target type.

    An empty target runs the paged represent-level search here; target "2"
    delegates to merchants(); anything else delegates to represents().
    """
    form_data = json.loads(request.args.get("formData"))
    target = form_data["target"]
    if target == "":
        # Paged represent-level search with the caller-supplied filters.
        query = {
            'limit': setStringToNumber(request.args.get("length")),
            'offset': setStringToNumber(request.args.get("start")),
            'representId': getParameter(form_data, "merchantId"),
            'name': getParameter(form_data, "name"),
            'alias': getParameter(form_data, "alias"),
            'status': getParameter(form_data, "status"),
            'bizRegNo': paramEscape(getParameter(form_data, "bizRegNo")),
        }
        return json.dumps(getApiData("/merchants/totalMerchants", query))
    if target == "2":
        return merchants()
    return represents()
@merchantApi.route("/api/merchants/merchant/represent", methods=['POST','GET', "PUT", "DELETE"])
def represent():
    """Dispatch represent-merchant CRUD to a handler by HTTP method."""
    handlers = {
        'GET': getRepresent,
        'POST': createMerchant,
        'PUT': updateMerchant,
        'DELETE': deleteRepresent,
    }
    handler = handlers.get(request.method)
    if handler is not None:
        return handler()
def createMerchant():
    """Submit an approval request to register a new represent merchant.

    Reads merchant fields from the JSON request body, posts an approval
    request to the backend, and — only when the backend answers status
    "200" — pushes a notification to both the approver and the requester.
    Returns the backend response serialized as a JSON string.
    """
    form_data = request.json
    # Payload for the approval request; paramEscape is applied to the
    # free-format number/phone fields only.
    merchantData = {
        "name" : getParameter(form_data,"name"),
        "alias" : getParameter(form_data,"alias"),
        "ceoName" : getParameter(form_data,"ceoName"),
        "openDate" : paramEscape(getParameter(form_data,"openDate")),
        "bizRegNo" : paramEscape(getParameter(form_data,"bizRegNo")),
        "corpRegNo" : paramEscape(getParameter(form_data,"corpRegNo")),
        "bizKind" : getParameter(form_data,"bizKind"),
        "bizCond" : getParameter(form_data,"bizCond"),
        "bizGrp" : getParameter(form_data,"bizGrp"),
        "useFlag" : getParameter(form_data,"useFlag"),
        "zipCode" : getParameter(form_data,"zipCode"),
        "address" : getParameter(form_data,"address"),
        "addressDetail" : getParameter(form_data,"addressDetail"),
        "tel" : paramEscape(getParameter(form_data,"tel")),
        "fax" : paramEscape(getParameter(form_data,"fax")),
        "reqEmpId" : session['empId'],
        "apprEmpId" : getParameter(form_data,"apprEmpId"),
    }
    # Notification for the approver; message text (Korean) means
    # "represent merchant registration approval request".
    toNotiData = {
        "toEmpId": getParameter(form_data,"apprEmpId")
        ,"fromEmpId": session['empId']
        ,"message" : "대표 거래처 등록 승인 신청"
    }
    # Mirror notification back to the requester (same message text).
    fromNotiData = {
        "toEmpId": session['empId']
        ,"fromEmpId": getParameter(form_data,"apprEmpId")
        ,"message" : "대표 거래처 등록 승인 신청"
    }
    reponseResult = request_post("/approvals/request/merchant", merchantData, '1')
    # Notify only on success; note the backend reports status as the string "200".
    if "status" in reponseResult and reponseResult["status"] == "200" :
        KpcNotification().push_user_notification(toNotiData)
        KpcNotification().push_user_notification(fromNotiData)
    return json.dumps(reponseResult)
def updateMerchant():
    """Submit an approval request to modify an existing represent merchant.

    Mirrors createMerchant but additionally carries the merchantId,
    an update reason ("createDesc") and the requester memo. Notifies
    approver and requester on backend status "200". Returns the backend
    response serialized as a JSON string.
    """
    form_data = request.json
    merchantData = {
        "merchantId" : getParameter(form_data,"merchantId"), # : String,
        "name" : getParameter(form_data,"name"),
        "alias" : getParameter(form_data,"alias"),
        "ceoName" : getParameter(form_data,"ceoName"),
        "openDate" : paramEscape(getParameter(form_data,"openDate")),
        "bizRegNo" : paramEscape(getParameter(form_data,"bizRegNo")),
        "corpRegNo" : paramEscape(getParameter(form_data,"corpRegNo")),
        "bizKind" : getParameter(form_data,"bizKind"),
        "bizCond" : getParameter(form_data,"bizCond"),
        "bizGrp" : getParameter(form_data,"bizGrp"),
        "useFlag" : getParameter(form_data,"useFlag"),
        "zipCode" : getParameter(form_data,"zipCode"),
        "address" : getParameter(form_data,"address"),
        "addressDetail" : getParameter(form_data,"addressDetail"),
        "tel" : paramEscape(getParameter(form_data,"tel")),
        "fax" : paramEscape(getParameter(form_data,"fax")),
        # Fixed change description (Korean: "modification request").
        "createDesc" : "수정 요청",
        "reqMemo" : getParameter(form_data,"reqMemo"),
        "reqEmpId" : session['empId'],
        "apprEmpId" : getParameter(form_data,"apprEmpId")
    }
    # Notifications to both parties; message (Korean) means
    # "represent merchant modification approval request".
    toNotiData = {
        "toEmpId": getParameter(form_data,"apprEmpId")
        ,"fromEmpId": session['empId']
        ,"message" : "대표 거래처 수정 승인 신청"
    }
    fromNotiData = {
        "toEmpId": session['empId']
        ,"fromEmpId": getParameter(form_data,"apprEmpId")
        ,"message" : "대표 거래처 수정 승인 신청"
    }
    reponseResult = request_put("/approvals/request/merchant", merchantData, '1')
    # Push notifications only when the backend accepted the request.
    if "status" in reponseResult and reponseResult["status"] == "200" :
        KpcNotification().push_user_notification(toNotiData)
        KpcNotification().push_user_notification(fromNotiData)
    return json.dumps(reponseResult)
def deleteRepresent():
    """Submit an approval request to delete a represent merchant.

    On backend status "200", pushes the same notice (Korean:
    "represent merchant delete approval request") to approver and requester.
    """
    payload = request.json
    requester = session['empId']
    approver = getParameter(payload, "apprEmpId")
    deleteRequest = {
        "merchantId": getParameter(payload, "merchantId"),  # : String,
        "reqEmpId": requester,
        "apprEmpId": approver,
        "reqMemo": getParameter(payload, "reqMemo"),
    }
    result = request_put("/approvals/request/merchant/delete", deleteRequest, '1')
    if "status" in result and result["status"] == "200":
        # notify the approver first, then echo to the requester
        for receiver, sender in ((approver, requester), (requester, approver)):
            KpcNotification().push_user_notification({
                "toEmpId": receiver,
                "fromEmpId": sender,
                "message": "대표 거래처 삭제 승인 신청",
            })
    return json.dumps(result)
# Look up represent-merchant details by id.
@merchantApi.route("/api/merchants/representative/<merchantId>", methods=['GET'])
def getRepresentativeMerchant(merchantId):
    """Proxy a represent-merchant lookup to the backoffice API."""
    backendPath = "/merchants/representative/" + merchantId
    return json.dumps(request_get(backendPath, None, API_SERVER_BACKOFFICE))
@merchantApi.route("/api/merchants/represents", methods=['GET'])
def represents():
    """List represent merchants matching the filters in formData.

    Pagination comes from the DataTables-style 'length'/'start' query args;
    search filters come from the JSON-encoded 'formData' query arg.
    Returns the backend result as a JSON string.
    """
    form_data = json.loads(request.args.get("formData"))
    queryData = {
        'limit': setStringToNumber(request.args.get("length")),
        'offset': setStringToNumber(request.args.get("start")),
        'representId': getParameter(form_data,"merchantId"),
        'name': getParameter(form_data,"name"),
        'alias':getParameter(form_data,"alias"),
        'status': getParameter(form_data,"status"),
        'bizRegNo': paramEscape(getParameter(form_data,"bizRegNo")),
    }
    # Debug `print result_data` removed: Python-2-only syntax and it leaked
    # response payloads to stdout on every request.
    result_data = getApiData("/merchants/represents" ,queryData)
    return json.dumps(result_data)
@merchantApi.route("/api/merchants", methods=['GET'])
def merchants():
    """List sub merchants matching the filters in formData.

    Pagination comes from 'length'/'start' query args; filters from the
    JSON-encoded 'formData' query arg. Returns the backend result as a
    JSON string.
    """
    form_data = json.loads(request.args.get("formData"))
    queryData = {
        'limit': setStringToNumber(request.args.get("length")),
        'offset': setStringToNumber(request.args.get("start")),
        'merchantId': getParameter(form_data,"merchantId"),
        'name': getParameter(form_data,"name"),
        'alias': getParameter(form_data,"alias"),
        'depth': getParameter(form_data,"depth"),
        'childId': getParameter(form_data,"childId"),
        'status': getParameter(form_data,"status"),
        'bizRegNo': paramEscape(getParameter(form_data,"bizRegNo")),
    }
    # Debug `print queryData` / `print result_data` removed: Python-2-only
    # syntax and noisy request/response dumps on stdout.
    result_data = getApiData("/merchants" ,queryData)
    return json.dumps(result_data)
@merchantApi.route("/api/merchants/merchant/bznoCheck", methods=['GET'])
def bznoCheck():
    """Check whether a business registration number is already in use.

    BUG FIX: getParameter was called with a literal empty dict, so both
    parameters were always empty regardless of the request — the duplicate
    check never received the caller's values. Read them from the query
    string instead.
    TODO(review): confirm getParameter accepts Flask's request.args
    MultiDict the same way it accepts a plain dict.
    """
    queryData = {
        'merchantId': getParameter(request.args,"merchantId"),
        'bizRegNo': paramEscape(getParameter(request.args,"bizRegNo")),
    }
    result_data = getData("/merchants/merchant/bznoCheck" ,queryData)
    return json.dumps(result_data)
@merchantApi.route("/api/merchants/merchant/svcConnIdCheck", methods=['GET'])
def svcConnIdCheck():
    """Check whether a service connection id is already in use.

    BUG FIX: getParameter was called with a literal empty dict, so the
    checked id was always empty. Read it from the query string instead.
    TODO(review): confirm getParameter accepts Flask's request.args
    MultiDict, and confirm the query-arg name ("svcConnIdCheck") against
    the calling page.
    """
    queryData = {
        'svcConnId': getParameter(request.args,"svcConnIdCheck"),
        'serviceId': "",
    }
    result_data = getData("/merchants/merchant/svcConnIdCheck" ,queryData)
    return json.dumps(result_data)
@merchantApi.route("/api/merchants/merchant/corpNoCheck", methods=['GET'])
def corpNoCheck():
    """Check whether a corporate registration number is already in use.

    BUG FIX: getParameter was called with a literal empty dict, so both
    parameters were always empty. Read them from the query string instead.
    TODO(review): confirm getParameter accepts Flask's request.args
    MultiDict the same way it accepts a plain dict.
    """
    queryData = {
        'merchantId': getParameter(request.args,"merchantId"),
        'corpRegNo': paramEscape(getParameter(request.args,"corpRegNo")),
    }
    result_data = getData("/merchants/merchant/corpNoCheck" ,queryData)
    return json.dumps(result_data)
def getRepresent():
    """Fetch one represent merchant.

    An explicit 'url' query arg overrides the default lookup by
    'merchantId'. Returns the backend record as a JSON string.

    Fixes: removed the Python-2-only debug print, and wrapped the
    merchantId in setNoneToBlank so a missing query arg no longer raises
    TypeError on string concatenation (matches getMerchant).
    """
    url = request.args.get("url")
    if url is None :
        url = "/merchants/represent?representId=" + setNoneToBlank(request.args.get("merchantId"))
    result_data = getApiSingleData(url ,{})
    return json.dumps(result_data)
@merchantApi.route("/api/merchants/merchant", methods=['POST','GET', "PUT", "DELETE"])
def merchant():
    """Dispatch sub-merchant CRUD to a handler by HTTP method."""
    handlers = {
        'GET': getMerchant,
        'POST': createSubMerchant,
        'PUT': putMerchant,
        'DELETE': deleteMerchant,
    }
    handler = handlers.get(request.method)
    if handler is not None:
        return handler()
def getMerchant():
    """Fetch one sub merchant; an explicit 'url' query arg overrides the
    default lookup by 'merchantId'."""
    target = request.args.get("url")
    if target is None:
        merchantId = setNoneToBlank(request.args.get("merchantId"))
        target = "/merchants/merchant?merchantId=" + merchantId
    return json.dumps(getApiSingleData(target, {}))
def createSubMerchant():
    """Submit an approval request to register a new sub merchant.

    Maps the (abbreviated) form field names from the JSON body onto the
    backend's full field names and posts an approval request. Returns the
    backend response as a JSON string. Unlike createMerchant, this sends
    no notifications.
    """
    form_data = request.json
    subMerchantData = {
        "parentId" : getParameter(form_data,"parentId"),
        "name" : getParameter(form_data,"name"),
        "alias" : getParameter(form_data,"alias"),
        "ceoName" : getParameter(form_data,"ceoName"),
        "openDate" : paramEscape(getParameter(form_data,"openDate")) ,
        "bizRegNo" : paramEscape(getParameter(form_data,"bizRegNo")),
        "corpRegNo" : paramEscape(getParameter(form_data,"corpRegNo")),
        "bizKind" : getParameter(form_data,"bizKind"),
        "bizCond" : getParameter(form_data,"bizCond"),
        # NOTE(review): the form key is lowercase "zipcode" here, while the
        # represent-merchant handlers use "zipCode" — presumably the sub
        # merchant form really posts "zipcode"; verify against the page.
        "zipCode" : getParameter(form_data,"zipcode"),
        "address" : getParameter(form_data,"address"),
        "addressDetail" : getParameter(form_data,"addressDetail"),
        # backend "type" is fed from the form's "bizGrp" field
        "type" : getParameter(form_data,"bizGrp"),
        "tel" : paramEscape(getParameter(form_data,"tel")),
        "fax" : paramEscape(getParameter(form_data,"fax")),
        "taxCustName" : getParameter(form_data,"taxCustNm"),
        "taxTel" : paramEscape(getParameter(form_data,"taxTel")),
        "taxFax" : paramEscape(getParameter(form_data,"taxFax")),
        "taxPhone" : paramEscape(getParameter(form_data,"taxPhone")),
        "taxEmail" : getParameter(form_data,"taxEmail"),
        "bankName" : getParameter(form_data,"bankNm"),
        "bankAccountNo" : getParameter(form_data,"bankAccNo"),
        "bankHolder" : getParameter(form_data,"bankHolder"),
        "salesName" : getParameter(form_data,"salesNm"),
        "salesTel" : getParameter(form_data,"salesTel"),
        "billingName" : getParameter(form_data,"billingNm"),
        "billingTel" : getParameter(form_data,"billingTel"),
        "kpcSalesName" : getParameter(form_data,"kpcSalesNm"),
        "kpcSalesTel" : getParameter(form_data,"kpcSalesTel"),
        "kpcBillingName" : getParameter(form_data,"kpcBillingNm"),
        "kpcBillingTel" : getParameter(form_data,"kpcBillingTel"),
        "agentId" : getParameter(form_data,"agentId"),
        "agentPw" : getParameter(form_data,"agentPw"),
        "useFlag" : getParameter(form_data,"useFlag"),
        "urlHome" : getParameter(form_data,"urlHome"),
        "reqEmpId" : session['empId'],
        "apprEmpId" : getParameter(form_data,"apprEmpId"),
    }
    reponseResult = request_post("/approvals/request/sub-merchant", subMerchantData, '1')
    return json.dumps(reponseResult)
def putMerchant():
    """Submit an approval request to modify an existing sub merchant.

    Same field mapping as createSubMerchant, plus subMerchantId and the
    requester memo. Returns the backend response as a JSON string.
    """
    form_data = request.json
    subMerchantData = {
        "subMerchantId" : getParameter(form_data,"subMerchantId"),
        "parentId" : getParameter(form_data,"parentId"),
        "name" : getParameter(form_data,"name"),
        "alias" : getParameter(form_data,"alias"),
        "ceoName" : getParameter(form_data,"ceoName"),
        "openDate" : paramEscape(getParameter(form_data,"openDate")) ,
        "bizRegNo" : paramEscape(getParameter(form_data,"bizRegNo")),
        "corpRegNo" : paramEscape(getParameter(form_data,"corpRegNo")),
        "bizKind" : getParameter(form_data,"bizKind"),
        "bizCond" : getParameter(form_data,"bizCond"),
        # NOTE(review): lowercase "zipcode" form key, mirroring
        # createSubMerchant — verify against the page.
        "zipCode" : getParameter(form_data,"zipcode"),
        "address" : getParameter(form_data,"address"),
        "addressDetail" : getParameter(form_data,"addressDetail"),
        # backend "type" is fed from the form's "bizGrp" field
        "type" : getParameter(form_data,"bizGrp"),
        "tel" : paramEscape(getParameter(form_data,"tel")),
        "fax" : paramEscape(getParameter(form_data,"fax")),
        "taxCustName" : getParameter(form_data,"taxCustNm"),
        "taxTel" : paramEscape(getParameter(form_data,"taxTel")),
        "taxFax" : paramEscape(getParameter(form_data,"taxFax")),
        "taxPhone" : paramEscape(getParameter(form_data,"taxPhone")),
        "taxEmail" : getParameter(form_data,"taxEmail"),
        "bankName" : getParameter(form_data,"bankNm"),
        "bankAccountNo" : getParameter(form_data,"bankAccNo"),
        "bankHolder" : getParameter(form_data,"bankHolder"),
        "salesName" : getParameter(form_data,"salesNm"),
        "salesTel" : getParameter(form_data,"salesTel"),
        "billingName" : getParameter(form_data,"billingNm"),
        "billingTel" : getParameter(form_data,"billingTel"),
        "kpcSalesName" : getParameter(form_data,"kpcSalesNm"),
        "kpcSalesTel" : getParameter(form_data,"kpcSalesTel"),
        "kpcBillingName" : getParameter(form_data,"kpcBillingNm"),
        "kpcBillingTel" : getParameter(form_data,"kpcBillingTel"),
        "agentId" : getParameter(form_data,"agentId"),
        "agentPw" : getParameter(form_data,"agentPw"),
        "useFlag" : getParameter(form_data,"useFlag"),
        "urlHome" : getParameter(form_data,"urlHome"),
        "reqEmpId" : session['empId'],
        "reqMemo" : getParameter(form_data,"reqMemo"),
        "apprEmpId" : getParameter(form_data,"apprEmpId"),
    }
    reponseResult = request_put("/approvals/request/sub-merchant", subMerchantData, '1')
    return json.dumps(reponseResult)
def deleteMerchant():
    """Submit an approval request to delete a sub merchant."""
    body = request.json
    payload = {
        "subMerchantId": getParameter(body, "subMerchantId"),  # : String,
        "reqEmpId": session['empId'],
        "apprEmpId": getParameter(body, "apprEmpId"),
        "reqMemo": getParameter(body, "reqMemo"),
    }
    return json.dumps(request_put("/approvals/request/sub-merchant/delete", payload, '1'))
@merchantApi.route("/api/merchants/sub-merchant/<subMerchantId>/path", methods=['GET'])
def getSubMerchantPath(subMerchantId):
    """Proxy: fetch the hierarchy path of a sub merchant."""
    backendPath = "/merchants/sub-merchant/" + subMerchantId + "/path"
    return json.dumps(request_get(backendPath, None, API_SERVER_BACKOFFICE))
@merchantApi.route("/api/merchants/services", methods=['GET'])
def services():
    """List merchant services matching the filters in formData."""
    search = json.loads(request.args.get("formData"))
    params = {
        'limit': setStringToNumber(request.args.get("length")),
        'offset': setStringToNumber(request.args.get("start")),
        # backend filters on merchantId, fed from the form's submerchantId
        'merchantId': getParameter(search, "submerchantId"),
        'name': getParameter(search, "name"),
        'useFlag': getParameter(search, "useFlag"),
        'serviceId': getParameter(search, "serviceId"),
        'serviceType': getParameter(search, "serviceType"),
        'merchantName': getParameter(search, "merchantName"),
        'billingRegFlag': getParameter(search, "billingRegFlag"),
        'svcConnId': getParameter(search, "svcConnId")
    }
    return json.dumps(getApiData("/merchants/services", params))
@merchantApi.route("/api/merchants/billings", methods=['GET'])
def billings():
    """List commission history for the service named in formData.serviceId."""
    search = json.loads(request.args.get("formData"))
    serviceId = getParameter(search, "serviceId")
    endpoint = "/sub-merchant/" + serviceId + "/billing/commision-histories"
    return json.dumps(request_get(endpoint, None, API_SERVER_BACKOFFICE))
@merchantApi.route("/api/merchants/services/service", methods=['POST','GET', "PUT", "DELETE"])
def service():
    """Dispatch service CRUD to a handler by HTTP method."""
    handlers = {
        'GET': getService,
        'POST': postService,
        'PUT': putService,
        'DELETE': deleteService,
    }
    handler = handlers.get(request.method)
    if handler is not None:
        return handler()
def getService():
    """Fetch one service; an explicit 'url' query arg overrides the
    default lookup by 'serviceId'.

    Fixes: removed the Python-2-only debug print, and wrapped serviceId in
    setNoneToBlank so a missing query arg no longer raises TypeError on
    string concatenation (matches getMerchant).
    """
    url = request.args.get("url")
    if url is None :
        url = "/merchants/services/service?serviceId=" + setNoneToBlank(request.args.get("serviceId"))
    result_data = getApiSingleData(url ,{})
    return json.dumps(result_data)
def postService():
    """Submit an approval request to register a new service under a sub
    merchant. Returns the backend response as a JSON string.
    """
    form_data = request.json
    serviceData = {
        "subMerchantId" : getParameter(form_data, "submerchantId"),
        "serviceName" : getParameter(form_data, "serviceName"),
        "category" : getParameter(form_data, "category"),
        "type" : getParameter(form_data, "type"),
        "saleDivider" : getParameter(form_data, "saleDivider"),
        "useFlag" : getParameter(form_data, "useFlag"),
        "svcConnId" : getParameter(form_data, "svcConnId"),
        "svcConnPw" : getParameter(form_data, "svcConnPw"),
        "agentId" : getParameter(form_data, "agentId"),
        "agentPw" : getParameter(form_data, "agentPw"),
        # Fixed creation description (Korean: "new registration").
        "createDesc" : "신규등록" ,
        "createAdmId" : session['empId'],
        "reqEmpId" : session['empId'],
        "apprEmpId" : getParameter(form_data,"apprEmpId"),
    }
    reponseResult = request_post("/approvals/request/sub-merchant/service", serviceData, '1')
    return json.dumps(reponseResult)
def putService():
    """Submit an approval request to modify an existing service.

    Mirrors postService but carries serviceId, the requester memo and the
    updating admin id. Returns the backend response as a JSON string.
    """
    form_data = request.json
    serviceData = {
        "serviceId" : getParameter(form_data, "serviceId"),
        "subMerchantId" : getParameter(form_data, "submerchantId"),
        "serviceName" : getParameter(form_data, "serviceName"),
        "category" : getParameter(form_data, "category"),
        "type" : getParameter(form_data, "type"),
        "useFlag" : getParameter(form_data, "useFlag"),
        "saleDivider" : getParameter(form_data, "saleDivider"),
        "svcConnId" : getParameter(form_data, "svcConnId"),
        "svcConnPw" : getParameter(form_data, "svcConnPw"),
        "updateAdmId" : session['empId'],
        "reqEmpId" : session['empId'],
        "reqMemo" : getParameter(form_data,"reqMemo"),
        "apprEmpId" : getParameter(form_data,"apprEmpId")
    }
    reponseResult = request_put("/approvals/request/sub-merchant/service", serviceData, '1')
    return json.dumps(reponseResult)
def deleteService():
    """Submit an approval request to delete a service."""
    body = request.json
    payload = {
        "serviceId": getParameter(body, "serviceId"),  # : String,
        "reqEmpId": session['empId'],
        "apprEmpId": getParameter(body, "apprEmpId"),
        "reqMemo": getParameter(body, "reqMemo"),
    }
    return json.dumps(request_put("/approvals/request/sub-merchant/service/delete", payload, '1'))
@merchantApi.route("/api/merchants/services/service/billing", methods=['POST','GET', "PUT", "DELETE"])
def billing():
    """Dispatch service-billing CRUD to a handler by HTTP method."""
    handlers = {
        'GET': getBilling,
        'POST': postBilling,
        'PUT': putBilling,
        'DELETE': deleteBilling,
    }
    handler = handlers.get(request.method)
    if handler is not None:
        return handler()
def getBilling():
    """Fetch one service billing record by serviceBillingId.

    BUG FIX: getParameter was called with a literal empty dict, so the
    lookup key was always empty; read it from the query string instead.
    TODO(review): confirm getParameter accepts Flask's request.args
    MultiDict. Also removed the Python-2-only debug print.
    """
    result_data = getApiSingleData("/merchants/services/service/billing" ,{"serviceBillingId" : getParameter(request.args, "serviceBillingId")})
    return json.dumps(result_data)
# Register billing/settlement info for a service.
def postBilling():
    """Submit an approval request to register service billing info.

    Maps banking, contact and commission fields from the JSON body onto
    the backend's field names. Returns the backend response as a JSON
    string.
    """
    form_data = request.json
    billingData = {
        "serviceId" : getParameter(form_data,"serviceId"),
        "name" : getParameter(form_data,"name"),
        "bankCode" : getParameter(form_data,"bankCode"),
        "bankAccountNo" : getParameter(form_data,"bankAccNo"),
        "bankHolder" : getParameter(form_data,"bankHolder"),
        "managerName" : getParameter(form_data,"managerName"),
        "managerTel" : getParameter(form_data,"managerTel"),
        "managerEmail" : getParameter(form_data,"managerEmail"),
        "kpcManagerName" : getParameter(form_data,"kpcManagerName"),
        "kpcManagerTel" : getParameter(form_data,"kpcManagerTel"),
        "kpcManagerEmail" : getParameter(form_data,"kpcManagerEmail"),
        "code" : getParameter(form_data,"code"),
        "divider" : getParameter(form_data,"divider"),
        "billingDate" : paramEscape(getParameter(form_data,"billingDt")),
        "billingStartDate" : paramEscape(getParameter(form_data,"billingStartDate")),
        "billingDuration" : getParameter(form_data,"billingDuration"),
        "billingCommisionType" : getParameter(form_data,"billingCommType" ),
        "merchantCommisionType" : getParameter(form_data,"merchantCommType"),
        "merchantCommision": getParameter(form_data,"merchantCommision"),
        "merchantTaxType" : getParameter(form_data,"merchantTaxType"),
        "createAdmId" : session['empId'],
        "reqEmpId" : session['empId'],
        "apprEmpId" : getParameter(form_data,"apprEmpId")
    }
    reponseResult = request_post("/approvals/request/sub-merchant/service/billing", billingData, '1')
    return json.dumps(reponseResult)
# return json.dumps(postApiData("/merchants/services/service/billing", postBillingData))
def putBilling():
    """Submit an approval request to modify service billing info.

    Carries serviceBillingId/commisionId plus the same banking, contact
    and commission fields as postBilling (note: "code"/"divider" come
    from "billingCode"/"billingDivider" here). Returns the backend
    response as a JSON string.

    Fix: removed the debug print of aplEndDate — Python-2-only syntax,
    and the string concatenation raised TypeError whenever paramEscape
    returned a non-string (e.g. for a missing field).
    """
    form_data = request.json
    billingData = {
        "serviceBillingId" : getParameter(form_data,"serviceBillingId"),
        "commisionId" : getParameter(form_data,"commisionId"),
        "name" : getParameter(form_data,"name"),
        "bankCode" : getParameter(form_data,"bankCode"),
        "bankAccountNo" : getParameter(form_data,"bankAccNo"),
        "bankHolder" : getParameter(form_data,"bankHolder"),
        "managerName" : getParameter(form_data,"managerName"),
        "managerTel" : getParameter(form_data,"managerTel"),
        "managerEmail" : getParameter(form_data,"managerEmail"),
        "kpcManagerName" : getParameter(form_data,"kpcManagerName"),
        "kpcManagerTel" : getParameter(form_data,"kpcManagerTel"),
        "kpcManagerEmail" : getParameter(form_data,"kpcManagerEmail"),
        "code" : getParameter(form_data,"billingCode"),
        "divider" : getParameter(form_data,"billingDivider"),
        "billingDate" : paramEscape(getParameter(form_data,"billingDt")),
        "billingDuration" : getParameter(form_data,"billingDuration"),
        "billingCommisionType" : getParameter(form_data,"billingCommType" ),
        "merchantCommisionType" : getParameter(form_data,"merchantCommType"),
        "merchantCommision": getParameter(form_data,"merchantCommision"),
        "merchantTaxType" : getParameter(form_data,"merchantTaxType"),
        "createAdmId" : session['empId'],
        "reqEmpId" : session['empId'],
        "reqMemo" : getParameter(form_data,"reqMemo"),
        "apprEmpId" : getParameter(form_data,"apprEmpId")
    }
    reponseResult = request_put("/approvals/request/sub-merchant/service/billing", billingData, '1')
    return json.dumps(reponseResult)
# Unused (legacy): direct delete of a service billing record.
def deleteBilling():
    """Delete a service billing directly; still wired to the DELETE
    branches of the billing dispatchers."""
    params = {
        'billingId': setNoneToBlank(request.args.get("billingId")),
    }
    return json.dumps(deleteApiData("/merchants/services/service/billing", params))
@merchantApi.route("/api/merchants/services/service/billing/commision", methods=['POST', "PUT", "DELETE"])
def billingCommision():
    """Dispatch billing-commission create/update/delete by HTTP method."""
    if (request.method == 'POST') :
        return postBillingCommision()
    elif (request.method == 'PUT') :
        return putBillingCommision()
    elif (request.method == 'DELETE') :
        # NOTE(review): DELETE falls through to deleteBilling, which is
        # marked "unused" and deletes by 'billingId' — presumably a
        # deleteBillingCommision was intended; confirm with the frontend.
        return deleteBilling()
def postBillingCommision():
    """Submit an approval request to add a new commission entry to an
    existing service billing. Returns the backend response as a JSON
    string.
    """
    form_data = request.json
    billingData = {
        "serviceBillingId" : getParameter(form_data,"serviceBillingId"),
        "code" : getParameter(form_data,"billingCode"),
        "divider" : getParameter(form_data,"billingDivider"),
        "billingDate" : paramEscape(getParameter(form_data,"billingDt")),
        "billingDuration" : getParameter(form_data,"billingDuration"),
        "billingCommisionType" : getParameter(form_data,"billingCommType" ),
        "merchantCommisionType" : getParameter(form_data,"merchantCommType"),
        "merchantCommision": getParameter(form_data,"merchantCommision"),
        "merchantTaxType" : getParameter(form_data,"merchantTaxType"),
        "billingStartDate" : paramEscape(getParameter(form_data,"billingStartDate")),
        # end date of the previous commission period, supplied by the form
        "beforeBillingEndDate" : getParameter(form_data,"beforeBillingEndDate"),
        "createAdmId" : session['empId'],
        "reqEmpId" : session['empId'],
        "apprEmpId" : getParameter(form_data,"apprEmpId")
    }
    reponseResult = request_post("/approvals/request/sub-merchant/service/billing/commision", billingData, '1')
    return json.dumps(reponseResult)
def putBillingCommision():
    """Submit an approval request to modify an existing commission entry.

    Mirrors postBillingCommision but additionally carries commisionId and
    the requester memo. Returns the backend response as a JSON string.
    """
    form_data = request.json
    billingData = {
        "serviceBillingId" : getParameter(form_data,"serviceBillingId"),
        "commisionId" : getParameter(form_data,"commisionId"),
        "code" : getParameter(form_data,"billingCode"),
        "divider" : getParameter(form_data,"billingDivider"),
        "billingDate" : paramEscape(getParameter(form_data,"billingDt")),
        "billingDuration" : getParameter(form_data,"billingDuration"),
        "billingCommisionType" : getParameter(form_data,"billingCommType" ),
        "merchantCommisionType" : getParameter(form_data,"merchantCommType"),
        "merchantCommision": getParameter(form_data,"merchantCommision"),
        "merchantTaxType" : getParameter(form_data,"merchantTaxType"),
        "billingStartDate" : paramEscape(getParameter(form_data,"billingStartDate")),
        # end date of the previous commission period, supplied by the form
        "beforeBillingEndDate" : getParameter(form_data,"beforeBillingEndDate"),
        "createAdmId" : session['empId'],
        "reqEmpId" : session['empId'],
        "reqMemo" : getParameter(form_data,"reqMemo"),
        "apprEmpId" : getParameter(form_data,"apprEmpId")
    }
    reponseResult = request_put("/approvals/request/sub-merchant/service/billing/commision", billingData, '1')
    return json.dumps(reponseResult)
# Look up billing info by service-billing id.
# (Function name keeps the original "Servie" spelling — it is the public
# endpoint handler name.)
@merchantApi.route("/api/merchants/sub-merchant/billing/<commisionId>", methods=['GET'])
def readServieBilling(commisionId):
    """Proxy a billing lookup to the backoffice API."""
    endpoint = "/sub-merchant/billing/" + commisionId
    return json.dumps(request_get(endpoint, None, API_SERVER_BACKOFFICE))
# Look up the service's last billing info by service-billing id and search type.
@merchantApi.route("/api/merchants/sub-merchant/billing/<commisionId>/<searchType>", methods=['GET'])
def readServieLastBilling(commisionId,searchType):
    """Proxy a last-billing lookup to the backoffice API."""
    endpoint = "/sub-merchant/billing/" + commisionId + "/" + searchType
    return json.dumps(request_get(endpoint, None, API_SERVER_BACKOFFICE))
# Look up billing info by commission id.
@merchantApi.route("/api/merchants/sub-merchant/billing/commision/<commisionId>", methods=['GET'])
def readServieBillingByCommisionId(commisionId):
    """Proxy a billing-by-commission-id lookup to the backoffice API."""
    endpoint = "/sub-merchant/billing/commision/" + commisionId
    return json.dumps(request_get(endpoint, None, API_SERVER_BACKOFFICE))
| 47.526235
| 134
| 0.613858
| 2,555
| 30,797
| 7.27045
| 0.092759
| 0.107666
| 0.249785
| 0.061746
| 0.832257
| 0.754576
| 0.730728
| 0.708064
| 0.687285
| 0.676895
| 0
| 0.001039
| 0.24983
| 30,797
| 647
| 135
| 47.599691
| 0.803013
| 0.018346
| 0
| 0.696915
| 0
| 0
| 0.239756
| 0.05567
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.010889
| null | null | 0.016334
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b02554099459f6c2823d52b5a6cc2a14661ed080
| 49
|
py
|
Python
|
main_app/mixins/__init__.py
|
radekska/django-network-controller
|
6bcb847cbe1efa7dee118974de5e49b4f411e5da
|
[
"MIT"
] | null | null | null |
main_app/mixins/__init__.py
|
radekska/django-network-controller
|
6bcb847cbe1efa7dee118974de5e49b4f411e5da
|
[
"MIT"
] | null | null | null |
main_app/mixins/__init__.py
|
radekska/django-network-controller
|
6bcb847cbe1efa7dee118974de5e49b4f411e5da
|
[
"MIT"
] | null | null | null |
from .JSONResponseMixin import JSONResponseMixin
| 24.5
| 48
| 0.897959
| 4
| 49
| 11
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081633
| 49
| 1
| 49
| 49
| 0.977778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b03c4a6bd1a98a40d31456e0f4e980da597487e7
| 5,927
|
py
|
Python
|
ivy_tests/test_ivy/test_functional/test_nn/test_activations.py
|
VedPatwardhan/ivy
|
7b2105fa8cf38879444a1029bfaa7f0b2f27717a
|
[
"Apache-2.0"
] | 1
|
2022-02-13T19:35:02.000Z
|
2022-02-13T19:35:02.000Z
|
ivy_tests/test_ivy/test_functional/test_nn/test_activations.py
|
Arijit1000/ivy
|
de193946a580ca0f54d78fe7fc4031a6ff66d2bb
|
[
"Apache-2.0"
] | null | null | null |
ivy_tests/test_ivy/test_functional/test_nn/test_activations.py
|
Arijit1000/ivy
|
de193946a580ca0f54d78fe7fc4031a6ff66d2bb
|
[
"Apache-2.0"
] | null | null | null |
"""Collection of tests for unified neural network activation functions."""
# global
import numpy as np
from hypothesis import given, strategies as st
# local
import ivy
import ivy_tests.test_ivy.helpers as helpers
import ivy.functional.backends.numpy as ivy_np
# relu
@given(
    dtype_and_x=helpers.dtype_and_values(ivy_np.valid_float_dtypes),
    as_variable=st.booleans(),
    with_out=st.booleans(),
    native_array=st.booleans(),
    num_positional_args=st.integers(0, 2),
    container=st.booleans(),
    instance_method=st.booleans(),
)
def test_relu(
    dtype_and_x,
    as_variable,
    with_out,
    native_array,
    num_positional_args,
    container,
    instance_method,
    fw,
):
    """Exercise ivy.relu through the shared array-function harness."""
    dtype, x = dtype_and_x
    arr = np.asarray(x, dtype=dtype)
    # skip zero-dim inputs and the torch + float16 combination
    if arr.shape == () or (fw == "torch" and dtype == "float16"):
        return
    helpers.test_array_function(
        dtype,
        as_variable,
        with_out,
        native_array,
        fw,
        num_positional_args,
        container,
        instance_method,
        "relu",
        x=arr,
    )
# leaky_relu
@given(
    dtype_and_x=helpers.dtype_and_values(ivy_np.valid_float_dtypes),
    as_variable=helpers.list_of_length(st.booleans(), 2),
    native_array=st.booleans(),
    num_positional_args=st.integers(0, 2),
    container=helpers.list_of_length(st.booleans(), 2),
    instance_method=st.booleans(),
    alpha=st.floats(),
)
def test_leaky_relu(
    dtype_and_x,
    alpha,
    as_variable,
    num_positional_args,
    container,
    instance_method,
    native_array,
    fw,
):
    """Exercise ivy.leaky_relu through the shared array-function harness."""
    dtype, x = dtype_and_x
    # Skip non-finite inputs or alpha (st.floats() can generate nan/inf).
    # NOTE(review): the alpha check relies on the truthiness of a
    # one-element ivy array rather than ivy.all — presumably equivalent
    # for a single element; confirm against ivy's array truthiness rules.
    if not ivy.all(ivy.isfinite(ivy.array(x))) or not ivy.isfinite(ivy.array([alpha])):
        return
    # torch + float16 combination is skipped (as in the sibling tests)
    if fw == "torch" and dtype == "float16":
        return
    helpers.test_array_function(
        dtype,
        as_variable,
        False,
        native_array,
        fw,
        num_positional_args,
        container,
        instance_method,
        "leaky_relu",
        x=np.asarray(x, dtype=dtype),
        alpha=alpha,
    )
# gelu
@given(
    dtype_and_x=helpers.dtype_and_values(ivy_np.valid_float_dtypes),
    as_variable=st.booleans(),
    native_array=st.booleans(),
    num_positional_args=st.integers(0, 2),
    container=st.booleans(),
    instance_method=st.booleans(),
    approximate=st.booleans(),
)
def test_gelu(
    dtype_and_x,
    as_variable,
    approximate,
    num_positional_args,
    container,
    instance_method,
    native_array,
    fw,
):
    """Exercise ivy.gelu through the shared array-function harness."""
    dtype, x = dtype_and_x
    # the torch + float16 combination is skipped, as in the sibling tests
    if fw == "torch" and dtype == "float16":
        return
    helpers.test_array_function(
        dtype,
        as_variable,
        False,
        native_array,
        fw,
        num_positional_args,
        container,
        instance_method,
        "gelu",
        x=np.asarray(x, dtype=dtype),
        approximate=approximate,
    )
# tanh
@given(
    dtype_and_x=helpers.dtype_and_values(ivy_np.valid_float_dtypes),
    as_variable=st.booleans(),
    native_array=st.booleans(),
    num_positional_args=st.integers(0, 2),
    container=st.booleans(),
    instance_method=st.booleans(),
)
def test_tanh(
    dtype_and_x,
    as_variable,
    num_positional_args,
    container,
    instance_method,
    native_array,
    fw,
):
    """Exercise ivy.tanh through the shared array-function harness."""
    dtype, x = dtype_and_x
    # the torch + float16 combination is skipped, as in the sibling tests
    if fw == "torch" and dtype == "float16":
        return
    arr = np.asarray(x, dtype=dtype)
    helpers.test_array_function(
        dtype,
        as_variable,
        False,
        native_array,
        fw,
        num_positional_args,
        container,
        instance_method,
        "tanh",
        x=arr,
    )
# sigmoid
@given(
    dtype_and_x=helpers.dtype_and_values(ivy_np.valid_float_dtypes),
    as_variable=st.booleans(),
    native_array=st.booleans(),
    num_positional_args=st.integers(0, 2),
    container=st.booleans(),
    instance_method=st.booleans(),
)
def test_sigmoid(
    dtype_and_x,
    as_variable,
    num_positional_args,
    container,
    instance_method,
    native_array,
    fw,
):
    """Exercise ivy.sigmoid through the shared array-function harness."""
    dtype, x = dtype_and_x
    # the torch + float16 combination is skipped, as in the sibling tests
    if fw == "torch" and dtype == "float16":
        return
    arr = np.asarray(x, dtype=dtype)
    helpers.test_array_function(
        dtype,
        as_variable,
        False,
        native_array,
        fw,
        num_positional_args,
        container,
        instance_method,
        "sigmoid",
        x=arr,
    )
# softmax
@given(
    # NOTE(review): this test samples from ivy.all_float_dtypes while every
    # sibling test uses ivy_np.valid_float_dtypes — confirm the difference
    # is intentional.
    dtype_and_x=helpers.dtype_and_values(ivy.all_float_dtypes),
    as_variable=st.booleans(),
    native_array=st.booleans(),
    num_positional_args=st.integers(0, 2),
    container=st.booleans(),
    instance_method=st.booleans(),
)
def test_softmax(
    dtype_and_x,
    as_variable,
    num_positional_args,
    container,
    instance_method,
    native_array,
    fw,
):
    """Exercise ivy.softmax (over the last axis) through the shared harness."""
    dtype, x = dtype_and_x
    # softmax is always taken over the last axis here
    axis = -1
    # the torch + float16 combination is skipped, as in the sibling tests
    if fw == "torch" and dtype == "float16":
        return
    x = np.asarray(x, dtype=dtype)
    # zero-dim inputs are skipped (no axis to reduce over)
    if x.shape == ():
        return
    helpers.test_array_function(
        dtype,
        as_variable,
        False,
        native_array,
        fw,
        num_positional_args,
        container,
        instance_method,
        "softmax",
        x=x,
        axis=axis,
    )
# softplus
@given(
    dtype_and_x=helpers.dtype_and_values(ivy_np.valid_float_dtypes),
    as_variable=st.booleans(),
    native_array=st.booleans(),
    num_positional_args=st.integers(0, 2),
    container=st.booleans(),
    instance_method=st.booleans(),
)
def test_softplus(
    dtype_and_x,
    as_variable,
    num_positional_args,
    container,
    instance_method,
    native_array,
    fw,
):
    """Exercise ivy.softplus through the shared array-function harness."""
    dtype, x = dtype_and_x
    # the torch + float16 combination is skipped, as in the sibling tests
    if fw == "torch" and dtype == "float16":
        return
    arr = np.asarray(x, dtype=dtype)
    helpers.test_array_function(
        dtype,
        as_variable,
        False,
        native_array,
        fw,
        num_positional_args,
        container,
        instance_method,
        "softplus",
        x=arr,
    )
| 21.474638
| 87
| 0.622069
| 729
| 5,927
| 4.762689
| 0.102881
| 0.086406
| 0.054435
| 0.104839
| 0.842742
| 0.834677
| 0.783986
| 0.766705
| 0.766705
| 0.740495
| 0
| 0.007184
| 0.271976
| 5,927
| 275
| 88
| 21.552727
| 0.797451
| 0.02244
| 0
| 0.814516
| 0
| 0
| 0.022145
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028226
| false
| 0
| 0.020161
| 0
| 0.08871
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c669c5cdd4dc02426d167e8a759e2bed9e514cde
| 49
|
py
|
Python
|
desktop/core/ext-py/nose-1.3.7/functional_tests/support/coverage/tests/test_covered.py
|
kokosing/hue
|
2307f5379a35aae9be871e836432e6f45138b3d9
|
[
"Apache-2.0"
] | 5,079
|
2015-01-01T03:39:46.000Z
|
2022-03-31T07:38:22.000Z
|
desktop/core/ext-py/nose-1.3.7/functional_tests/support/coverage/tests/test_covered.py
|
zks888/hue
|
93a8c370713e70b216c428caa2f75185ef809deb
|
[
"Apache-2.0"
] | 1,623
|
2015-01-01T08:06:24.000Z
|
2022-03-30T19:48:52.000Z
|
desktop/core/ext-py/nose-1.3.7/functional_tests/support/coverage/tests/test_covered.py
|
zks888/hue
|
93a8c370713e70b216c428caa2f75185ef809deb
|
[
"Apache-2.0"
] | 2,033
|
2015-01-04T07:18:02.000Z
|
2022-03-28T19:55:47.000Z
|
# Minimal fixture: imports the module under coverage and calls into it so
# coverage data is recorded for it.
import blah
def test_blah():
    """Exercise ``blah.dostuff()`` so its lines show up as covered."""
    blah.dostuff()
| 9.8
| 18
| 0.673469
| 7
| 49
| 4.571429
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.204082
| 49
| 4
| 19
| 12.25
| 0.820513
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c6e43ecd17c505d382b35069c6e5684ed1a499ef
| 5,922
|
py
|
Python
|
torchocr/datasets/txt_reader.py
|
hua1024/OpenOCR
|
13ecfd18d103d5e70a87922cebe89077e8f0eb9c
|
[
"Apache-2.0"
] | 3
|
2021-02-02T06:10:50.000Z
|
2021-05-10T01:27:31.000Z
|
torchocr/datasets/txt_reader.py
|
hua1024/OpenOCR
|
13ecfd18d103d5e70a87922cebe89077e8f0eb9c
|
[
"Apache-2.0"
] | null | null | null |
torchocr/datasets/txt_reader.py
|
hua1024/OpenOCR
|
13ecfd18d103d5e70a87922cebe89077e8f0eb9c
|
[
"Apache-2.0"
] | 2
|
2021-02-02T06:11:25.000Z
|
2021-02-09T16:27:48.000Z
|
# coding=utf-8
# @Time : 2020/12/25 10:45
# @Auto : zzf-jeff
from torchocr.datasets.builder import DATASET
from torchocr.datasets.base import BaseDataset
from abc import ABCMeta, abstractmethod
import numpy as np
import cv2
import os
from tqdm import tqdm
@DATASET.register_module()
class DetTextICDAR15(BaseDataset, metaclass=ABCMeta):
    """Text-detection dataset for ICDAR15-style annotations.

    The index file lists ``img_path<sep>gt_path`` pairs; each gt file holds
    one box per line as 8 comma-separated coordinate strings followed by the
    transcription text.
    """

    def __init__(self, ann_file, pipeline, mode, data_root, **kwargs):
        # Field separator for the index file; defaults to ',' unless
        # overridden via kwargs['split_type'].
        if 'split_type' in kwargs:
            self.split_type = kwargs['split_type']
        else:
            self.split_type = ','
        super().__init__(ann_file, pipeline, mode, data_root, **kwargs)

    def load_annotations(self, ann_file):
        """Build one record per image: {'img_path', 'label', 'text'}.

        NOTE(review): unlike DetTextDataset, paths here are NOT joined with
        data_root -- presumably the index stores usable paths; confirm.
        """
        infos = self.read_txt(ann_file, self.split_type)
        data_infos = []
        for (img_path, gt_path) in tqdm(infos):
            labels, texts = self.get_bboxs(gt_path)
            data_infos.append({'img_path': img_path, 'label': labels, 'text': texts})
        return data_infos

    def get_bboxs(self, gt_path):
        """Parse one per-image ground-truth file.

        :param gt_path: path to the gt txt file
        :return: labels -- list of 8-element coordinate string lists,
                 e.g. [['2','3','4','5', ...]];
                 texts -- list of transcription fragments per line,
                 e.g. [['sss']]
        """
        labels = []
        texts = []
        with open(gt_path, 'r', encoding='utf-8') as fr:
            lines = fr.readlines()
            for line in lines:
                # Strip UTF-8 BOM markers and the trailing newline.
                line = line.replace('\ufeff', '').replace('\xef\xbb\xbf', '').strip('\n')
                gt = line.split(',')
                labels.append(gt[:8])  # first 8 fields: box coordinates
                texts.append(gt[8:])   # remaining fields: transcription
        return labels, texts

    def read_txt(self, txt_path, split_type):
        '''Read the annotation index file, one record per line.

        Lines have the form ``xxx/a/1.png,a`` and are split on *split_type*.

        Args:
            txt_path: train/valid/test data txt or json
        Returns:
            imgs: list, all data info
        '''
        with open(txt_path, 'r', encoding='utf-8') as f:
            infos = list(map(lambda line: line.strip().split(split_type), f))
        return infos
@DATASET.register_module()
class DetTextDataset(BaseDataset, metaclass=ABCMeta):
    """Generic text-detection dataset.

    Same index format as DetTextICDAR15, but paths are joined with
    ``data_root`` and the per-line transcription is replaced by the fixed
    placeholder ``['ocr']``.
    """

    def __init__(self, ann_file, pipeline, mode, data_root, **kwargs):
        # Field separator for the index file; defaults to ','.
        if 'split_type' in kwargs:
            self.split_type = kwargs['split_type']
        else:
            self.split_type = ','
        super().__init__(ann_file, pipeline, mode, data_root, **kwargs)

    def load_annotations(self, ann_file):
        """Build one record per image: {'img_path', 'label', 'text'}."""
        infos = self.read_txt(ann_file, self.split_type)
        data_infos = []
        for (img_path, gt_path) in tqdm(infos):
            # Index entries are relative to data_root when one is set.
            if self.data_root:
                img_path = os.path.join(self.data_root, img_path)
                gt_path = os.path.join(self.data_root, gt_path)
            labels, texts = self.get_bboxs(gt_path)
            data_infos.append({'img_path': img_path, 'label': labels, 'text': texts})
        return data_infos

    def get_bboxs(self, gt_path):
        """Parse one gt file into (labels, texts).

        labels: list of 8-element coordinate string lists per box.
        texts: a fixed ``['ocr']`` placeholder per box -- the real
        transcription is deliberately ignored here.
        """
        labels = []
        texts = []
        with open(gt_path, 'r', encoding='utf-8') as fr:
            lines = fr.readlines()
            for line in lines:
                # Strip UTF-8 BOM markers and the trailing newline.
                line = line.replace('\ufeff', '').replace('\xef\xbb\xbf', '').strip('\n')
                gt = line.split(',')
                labels.append(gt[:8])
                texts.append(['ocr'])
        return labels, texts

    def read_txt(self, txt_path, split_type):
        '''Read the annotation index file, one record per line.

        Lines have the form ``xxx/a/1.png,a`` and are split on *split_type*.

        Args:
            txt_path: train/valid/test data txt or json
        Returns:
            imgs: list, all data info
        '''
        with open(txt_path, 'r', encoding='utf-8') as f:
            infos = list(map(lambda line: line.strip().split(split_type), f))
        return infos
@DATASET.register_module()
class RecTextDataset(BaseDataset, metaclass=ABCMeta):
    """Text-recognition dataset: index lines are ``img_path<sep>label``."""

    def __init__(self, ann_file, pipeline, mode, data_root, **kwargs):
        # Separator defaults to a single space for recognition indexes.
        if 'split_type' in kwargs:
            self.split_type = kwargs['split_type']
        else:
            self.split_type = ' '
        super().__init__(ann_file, pipeline, mode, data_root, **kwargs)

    def load_annotations(self, ann_file):
        """Build one record per image: {'img_path', 'label'}."""
        infos = self.read_txt(ann_file, self.split_type)
        data_infos = []
        for info in tqdm(infos):
            # Skip malformed lines that did not split into exactly 2 fields.
            if len(info) != 2:
                continue
            img_path, label = info[0], info[1]
            if self.data_root:
                img_path = os.path.join(self.data_root, img_path)
            data_infos.append({'img_path': img_path, 'label': label})
        return data_infos

    def read_txt(self, txt_path, split_type):
        '''Read the annotation index file, one record per line.

        Lines have the form ``xxx/a/1.png,a``; maxsplit=1 is used so the
        label itself may contain the separator character.

        Args:
            txt_path: train/valid/test data txt or json
        Returns:
            imgs: list, all data info
        '''
        with open(txt_path, 'r', encoding='utf-8') as f:
            infos = list(map(lambda line: line.strip().split(split_type, 1), f))
        return infos
@DATASET.register_module()
class AyxRecTextDataset(RecTextDataset):
    """Recognition dataset assembled from several sub-index files.

    The top-level annotation file lists ``sub_index use_flag`` pairs; every
    sub-index whose flag is 1 is loaded through the parent class and the
    resulting records are concatenated.
    """

    def load_annotations(self, ann_file):
        entries = super().read_txt(ann_file, self.split_type)
        merged = []
        for sub_file, use_flag in entries:
            # Only enabled sub-datasets (flag == 1) contribute records.
            if int(use_flag) == 1:
                merged.extend(super().load_annotations(sub_file))
        return merged
@DATASET.register_module()
class AyxDetTextDataset(DetTextDataset):
    """Detection dataset assembled from several sub-index files.

    Mirrors AyxRecTextDataset: each ``sub_index use_flag`` pair in the
    top-level file is loaded via the parent class when its flag is 1.
    """

    def load_annotations(self, ann_file):
        entries = super().read_txt(ann_file, self.split_type)
        merged = []
        for sub_file, use_flag in entries:
            # Only enabled sub-datasets (flag == 1) contribute records.
            if int(use_flag) == 1:
                merged.extend(super().load_annotations(sub_file))
        return merged
| 34.430233
| 90
| 0.560621
| 731
| 5,922
| 4.331053
| 0.164159
| 0.065382
| 0.045167
| 0.036008
| 0.834176
| 0.825963
| 0.825963
| 0.805749
| 0.779848
| 0.769109
| 0
| 0.009933
| 0.319993
| 5,922
| 171
| 91
| 34.631579
| 0.77626
| 0.092536
| 0
| 0.769912
| 0
| 0
| 0.037396
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.115044
| false
| 0
| 0.061947
| 0
| 0.309735
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
05aa60420d3d669b985e50aab2eac45a7cb3c914
| 3,188
|
py
|
Python
|
tests/test_diff_positions.py
|
jacebrowning/beefore
|
76a43cfa3fb83cfa3b3301277636f1beca35aee7
|
[
"BSD-3-Clause"
] | 24
|
2016-07-14T04:25:31.000Z
|
2019-02-12T19:37:01.000Z
|
tests/test_diff_positions.py
|
jacebrowning/beefore
|
76a43cfa3fb83cfa3b3301277636f1beca35aee7
|
[
"BSD-3-Clause"
] | 17
|
2016-07-17T10:51:56.000Z
|
2019-02-02T20:20:43.000Z
|
tests/test_diff_positions.py
|
jacebrowning/beefore
|
76a43cfa3fb83cfa3b3301277636f1beca35aee7
|
[
"BSD-3-Clause"
] | 19
|
2016-07-15T01:05:00.000Z
|
2018-06-03T17:35:40.000Z
|
import unittest
from beefore import diff
class TestDiff(unittest.TestCase):
    """Tests for diff.positions(): mapping lines of a unified git diff to
    per-file positions. The given prefix ('tests') is stripped from the
    reported file paths."""

    def test_add_lines(self):
        # A single hunk that only adds lines.
        diff_content = [
            "diff --git a/tests/path/to/testfile b/tests/path/to/testfile",
            "@@ -1,4 +1,6 @@",
            " 1",
            "+2",
            "+3",
            " 4",
            " 5",
            " 6"
        ]
        self.assertEqual(
            diff.positions('tests', diff_content),
            {
                "path/to/testfile": {1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6}
            }
        )

    def test_subtract_lines(self):
        # A single hunk that only removes lines.
        diff_content = [
            "diff --git a/tests/path/to/testfile b/tests/path/to/testfile",
            "@@ -1,6 +1,2 @@",
            " 1",
            "-2",
            "-3",
            " 4"
        ]
        self.assertEqual(
            diff.positions('tests', diff_content),
            {
                "path/to/testfile": {1: 1, 2: 4}
            }
        )

    def test_add_subtract(self):
        # Mixed additions and removals across two hunks of one file.
        diff_content = [
            "diff --git a/tests/path/to/testfile b/tests/path/to/testfile",
            "index 5f4d692..5b05678 100644",
            "--- a/tests/path/to/testfile",
            "+++ b/tests/path/to/testfile",
            "@@ -2,0 +2,1 @@",
            " 1",
            "+2",
            " 3",
            "@@ -13,7 +14,4 @@",
            " 4",
            "-5",
            "-6",
            "+7",
            " 8",
            "-9",
            "+10"
        ]
        self.assertEqual(
            diff.positions('tests', diff_content),
            {
                "path/to/testfile": {2: 1, 3: 2, 4: 3, 14: 5, 15: 8, 16: 9, 17: 11}
            }
        )

    def test_no_diff(self):
        # Content without any diff headers yields an empty mapping.
        diff_content = [
            "1",
            "2",
            "3",
            "4"
        ]
        self.assertEqual(
            diff.positions('tests', diff_content),
            {}
        )

    def test_multi_file(self):
        # Two files in one diff produce independent per-file entries.
        diff_content = [
            "diff --git a/tests/path/to/testfile b/tests/path/to/testfile",
            "index 5f4d692..5b05678 100644",
            "--- a/tests/path/to/testfile",
            "+++ b/tests/path/to/testfile",
            "@@ -2,0 +2,1 @@",
            " 1",
            "+2",
            " 3",
            "@@ -13,7 +14,4 @@",
            " 4",
            "-5",
            "-6",
            "+7",
            " 8",
            "-9",
            "+10",
            "diff --git a/tests/path/to/secondfile b/tests/path/to/secondfile",
            "index 5f4d692..5b05678 100644",
            "--- a/tests/path/to/secondfile",
            "+++ b/tests/path/to/secondfile",
            "@@ -2,0 +2,1 @@",
            " 1",
            "+2",
            " 3",
            "@@ -13,7 +14,4 @@",
            " 4",
            "-5",
            "-6",
            "+7",
            " 8",
            "-9",
            "+10"
        ]
        self.assertEqual(
            diff.positions('tests', diff_content),
            {
                "path/to/testfile": {2: 1, 3: 2, 4: 3, 14: 5, 15: 8, 16: 9, 17: 11},
                "path/to/secondfile": {2: 1, 3: 2, 4: 3, 14: 5, 15: 8, 16: 9, 17: 11},
            }
        )
| 25.102362
| 86
| 0.354141
| 331
| 3,188
| 3.350453
| 0.148036
| 0.113616
| 0.158702
| 0.205591
| 0.829576
| 0.829576
| 0.823264
| 0.823264
| 0.800721
| 0.800721
| 0
| 0.123669
| 0.469887
| 3,188
| 126
| 87
| 25.301587
| 0.532544
| 0
| 0
| 0.610619
| 0
| 0.044248
| 0.276976
| 0.122961
| 0
| 0
| 0
| 0
| 0.044248
| 1
| 0.044248
| false
| 0
| 0.017699
| 0
| 0.070796
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
af3aad7524058c7c4fd2dde5a01f2e180e00eaf4
| 5,736
|
py
|
Python
|
tests/test_auth.py
|
cailholm/django-saml2-pro-auth
|
7da92ad814111726cddea0c4a39f29324a5ab2b4
|
[
"MIT"
] | 24
|
2017-11-06T14:08:15.000Z
|
2020-01-27T14:26:57.000Z
|
tests/test_auth.py
|
cailholm/django-saml2-pro-auth
|
7da92ad814111726cddea0c4a39f29324a5ab2b4
|
[
"MIT"
] | 29
|
2017-11-01T14:31:00.000Z
|
2020-02-06T08:33:14.000Z
|
tests/test_auth.py
|
cailholm/django-saml2-pro-auth
|
7da92ad814111726cddea0c4a39f29324a5ab2b4
|
[
"MIT"
] | 24
|
2017-11-01T15:17:49.000Z
|
2020-01-10T17:06:28.000Z
|
from django.test import TestCase
from django.test.utils import override_settings
from saml2_pro_auth.auth import get_clean_map
from saml2_pro_auth.utils import SAMLSettingsError
class TestAuth(TestCase):
    """Tests for get_clean_map(): merging the configured user-attribute map
    with the attribute map returned by the SAML provider."""

    def test_mapping_users_with_index_values(self):
        # Entries may name an explicit index into the SAML value list.
        user_map = {
            "email": {"index": 0, "key": "Email"},
            "name": {"index": 0, "key": "Username"},
        }
        saml_map = {
            "Username": ["montypython"],
            "lastName": ["Cleese"],
            "Email": ["montypython@example.com"],
            "firstName": ["John"],
        }
        merged_map = get_clean_map(user_map, saml_map)
        self.assertEqual(merged_map["email"], "montypython@example.com")
        self.assertEqual(merged_map["name"], "montypython")

    def test_mapping_users_without_index_values(self):
        # Bare string entries resolve to the first SAML value for that key.
        user_map = {"email": "Email", "name": "Username"}
        saml_map = {
            "Username": ["montypython"],
            "lastName": ["Cleese"],
            "Email": ["montypython@example.com"],
            "firstName": ["John"],
        }
        merged_map = get_clean_map(user_map, saml_map)
        self.assertEqual(merged_map["email"], "montypython@example.com")
        self.assertEqual(merged_map["name"], "montypython")

    def test_mapping_users_with_mixed_value_styles(self):
        # String, dict-with-index, and dict-without-index styles mix freely.
        user_map = {
            "email": "Email",
            "name": {"index": 1, "key": "Username"},
            "customer": {"key": "Client"},
        }
        saml_map = {
            "Username": ["", "montypython"],
            "lastName": ["Cleese"],
            "Email": ["montypython@example.com"],
            "firstName": ["John"],
            "Client": ["examplecorp"],
        }
        merged_map = get_clean_map(user_map, saml_map)
        self.assertEqual(merged_map["email"], "montypython@example.com")
        self.assertEqual(merged_map["name"], "montypython")
        self.assertEqual(merged_map["customer"], "examplecorp")

    def test_mapping_users_with_default_values(self):
        # NOTE(review): with strict mapping (the default settings) "default"
        # entries are rejected -- the call is expected to raise.
        user_map = {
            "email": "Email",
            "name": {"index": 1, "key": "Username", "default": "testUsername"},
            "customer": {"key": "Client", "default": "testClient"},
        }
        saml_map = {
            "Username": ["", "montypython"],
            "lastName": ["Cleese"],
            "Email": ["montypython@example.com"],
            "firstName": ["John"],
            "Client": ["examplecorp"],
        }
        self.assertRaises(SAMLSettingsError, get_clean_map, user_map, saml_map)

    @override_settings(SAML_USERS_STRICT_MAPPING=False)
    def test_non_strict_mapping_users_with_index_values(self):
        # Non-strict mode: attributes missing from the SAML response ("age")
        # are silently dropped from the merged map.
        user_map = {
            "email": {"index": 0, "key": "Email"},
            "name": {"index": 0, "key": "Username"},
            "age": {"index": 0, "key": "Age"},
        }
        saml_map = {
            "Username": ["montypython"],
            "lastName": ["Cleese"],
            "Email": ["montypython@example.com"],
            "firstName": ["John"],
        }
        merged_map = get_clean_map(user_map, saml_map)
        self.assertEqual(merged_map["email"], "montypython@example.com")
        self.assertEqual(merged_map["name"], "montypython")
        self.assertTrue("age" not in merged_map)

    @override_settings(SAML_USERS_STRICT_MAPPING=False)
    def test_non_strict_mapping_users_without_index_values(self):
        # Same missing-attribute behavior with bare-string entries.
        user_map = {
            "email": "Email",
            "name": "Username",
            "age": "Age",
        }
        saml_map = {
            "Username": ["montypython"],
            "lastName": ["Cleese"],
            "Email": ["montypython@example.com"],
            "firstName": ["John"],
        }
        merged_map = get_clean_map(user_map, saml_map)
        self.assertEqual(merged_map["email"], "montypython@example.com")
        self.assertEqual(merged_map["name"], "montypython")
        self.assertTrue("age" not in merged_map)

    @override_settings(SAML_USERS_STRICT_MAPPING=False)
    def test_non_strict_mapping_users_with_mixed_value_styles(self):
        # Mixed styles plus a missing attribute in non-strict mode.
        user_map = {
            "email": "Email",
            "name": {"index": 1, "key": "Username"},
            "customer": {"key": "Client"},
            "age": "Age",
        }
        saml_map = {
            "Username": ["", "montypython"],
            "lastName": ["Cleese"],
            "Email": ["montypython@example.com"],
            "firstName": ["John"],
            "Client": ["examplecorp"],
        }
        merged_map = get_clean_map(user_map, saml_map)
        self.assertEqual(merged_map["email"], "montypython@example.com")
        self.assertEqual(merged_map["name"], "montypython")
        self.assertEqual(merged_map["customer"], "examplecorp")
        self.assertTrue("age" not in merged_map)

    @override_settings(SAML_USERS_STRICT_MAPPING=False)
    def test_non_strict_mapping_users_with_default_value(self):
        # In non-strict mode "default" entries are honoured instead of raising.
        user_map = {
            "email": {"key": "Email"},
            "name": {"key": "Username", "index": 1},
            "is_superuser": {"key": "is_superuser", "default": False},
            "is_staff": {"key": "is_staff", "default": True},
        }
        saml_map = {
            "Username": ["", "montypython"],
            "lastName": ["Cleese"],
            "Email": ["montypython@example.com"],
            "firstName": ["John"],
            "Client": ["examplecorp"],
        }
        merged_map = get_clean_map(user_map, saml_map)
        self.assertEqual(merged_map["email"], "montypython@example.com")
        self.assertEqual(merged_map["name"], "montypython")
        self.assertEqual(merged_map["is_superuser"], False)
        self.assertEqual(merged_map["is_staff"], True)
| 35.407407
| 79
| 0.560669
| 564
| 5,736
| 5.427305
| 0.111702
| 0.082326
| 0.123489
| 0.14113
| 0.862463
| 0.843515
| 0.843515
| 0.834368
| 0.834368
| 0.834368
| 0
| 0.002656
| 0.278068
| 5,736
| 161
| 80
| 35.627329
| 0.736537
| 0
| 0
| 0.703704
| 0
| 0
| 0.251743
| 0.060146
| 0
| 0
| 0
| 0
| 0.162963
| 1
| 0.059259
| false
| 0
| 0.02963
| 0
| 0.096296
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
af44c51d8d646cae14ec97ee96416d490d3971b3
| 2,687
|
py
|
Python
|
test/regex.py
|
jesopo/scpl
|
1fa5acfb468ab212276781fa1760bb5eda438c23
|
[
"MIT"
] | null | null | null |
test/regex.py
|
jesopo/scpl
|
1fa5acfb468ab212276781fa1760bb5eda438c23
|
[
"MIT"
] | 2
|
2021-11-15T11:12:14.000Z
|
2021-11-15T17:35:27.000Z
|
test/regex.py
|
jesopo/scpl
|
1fa5acfb468ab212276781fa1760bb5eda438c23
|
[
"MIT"
] | null | null | null |
import re, unittest
from ipaddress import ip_network
from scpl.regex import lexer
class RegexTestLexer(unittest.TestCase):
    """Token-level checks for scpl.regex.lexer.tokenise()."""

    def _assert_tokens(self, pattern, expected):
        # expected: sequence of (token class, token text) pairs, in order.
        tokens = lexer.tokenise(pattern)
        self.assertEqual(len(tokens), len(expected))
        for token, (token_cls, text) in zip(tokens, expected):
            self.assertIsInstance(token, token_cls)
            self.assertEqual(token.text, text)

    def test_literal(self):
        self._assert_tokens("abc", [
            (lexer.RegexTokenLiteral, "a"),
            (lexer.RegexTokenLiteral, "b"),
            (lexer.RegexTokenLiteral, "c"),
        ])

    def test_repeats(self):
        self._assert_tokens("a+a*", [
            (lexer.RegexTokenLiteral, "a"),
            (lexer.RegexTokenOperator, "+"),
            (lexer.RegexTokenLiteral, "a"),
            (lexer.RegexTokenOperator, "*"),
        ])

    def test_group(self):
        self._assert_tokens("(a)", [
            (lexer.RegexTokenScope, "("),
            (lexer.RegexTokenLiteral, "a"),
            (lexer.RegexTokenScope, ")"),
        ])

    def test_group_nested(self):
        self._assert_tokens("((a))", [
            (lexer.RegexTokenScope, "("),
            (lexer.RegexTokenScope, "("),
            (lexer.RegexTokenLiteral, "a"),
            (lexer.RegexTokenScope, ")"),
            (lexer.RegexTokenScope, ")"),
        ])

    def test_group_flags(self):
        # Inline-flag group openers keep their full "(?i:" text.
        self._assert_tokens("(?i:a)", [
            (lexer.RegexTokenScope, "(?i:"),
            (lexer.RegexTokenLiteral, "a"),
            (lexer.RegexTokenScope, ")"),
        ])
| 43.33871
| 66
| 0.662449
| 293
| 2,687
| 6.047782
| 0.139932
| 0.194695
| 0.264108
| 0.167043
| 0.850451
| 0.73307
| 0.73307
| 0.73307
| 0.73307
| 0.693002
| 0
| 0.018868
| 0.191291
| 2,687
| 61
| 67
| 44.04918
| 0.796595
| 0
| 0
| 0.472727
| 0
| 0
| 0.015631
| 0
| 0
| 0
| 0
| 0
| 0.745455
| 1
| 0.090909
| false
| 0
| 0.054545
| 0
| 0.163636
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
af521633408b711dd587da7205a1395231e2ce0e
| 47
|
py
|
Python
|
test/__init__.py
|
stanford-policylab/mathbot
|
f41febffaaa78e6e1d09994cd9c34df5185ce251
|
[
"MIT"
] | 1
|
2020-10-15T03:14:21.000Z
|
2020-10-15T03:14:21.000Z
|
test/__init__.py
|
stanford-policylab/mathbot
|
f41febffaaa78e6e1d09994cd9c34df5185ce251
|
[
"MIT"
] | null | null | null |
test/__init__.py
|
stanford-policylab/mathbot
|
f41febffaaa78e6e1d09994cd9c34df5185ce251
|
[
"MIT"
] | null | null | null |
from .test_db import *
from .test_app import *
| 15.666667
| 23
| 0.744681
| 8
| 47
| 4.125
| 0.625
| 0.484848
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170213
| 47
| 2
| 24
| 23.5
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
afb21a7501bcb684aadefc9c664f65fad3e69973
| 13,858
|
py
|
Python
|
tests/test_oblique_shock_relations.py
|
Rigel09/CompAero
|
79a2902880c5bf6030794d585a48fbbf0c7df344
|
[
"MIT"
] | 1
|
2022-03-29T23:59:16.000Z
|
2022-03-29T23:59:16.000Z
|
tests/test_oblique_shock_relations.py
|
Rigel09/CompAero
|
79a2902880c5bf6030794d585a48fbbf0c7df344
|
[
"MIT"
] | 7
|
2022-01-15T15:38:45.000Z
|
2022-01-22T16:32:16.000Z
|
tests/test_oblique_shock_relations.py
|
Rigel09/CompAero
|
79a2902880c5bf6030794d585a48fbbf0c7df344
|
[
"MIT"
] | null | null | null |
from math import radians
from pytest import approx
import pytest
from CompAero.ObliqueShockRelations import ObliqueShockRelations as osr
from CompAero.internal import InvalidOptionCombinationError, ShockType
class TestObliqueShockClassFuncs:
    """Regression tests for the static oblique-shock helper functions on
    ObliqueShockRelations (imported as ``osr``). Expected values are fixed
    reference numbers; all angles are passed and compared in radians."""

    gamma = 1.4  # ratio of specific heats used throughout

    #######################################################################################
    # Test the Functions for Subsonic Case
    #######################################################################################
    # TODO: Figure out how to handle some of the functions that calculate mach from a value?
    # What to do for the subsonic case, raise an exception?
    # Can it be detected? I.E. value always greater than X for mach > 1?
    def test_subsonic_p2_p1(self):
        # Subsonic upstream Mach (< 1) must be rejected.
        with pytest.raises(ValueError):
            osr.calc_mach_normal_ahead_shock(0.5, self.gamma)

    #######################################################################################
    # Test the Functions for Superonic Case
    #######################################################################################
    # TODO: This function needs more tests and more checking on output validation
    def test_supersonic_calc_mach_normal_1(self):
        # NOTE(review): here the second argument is the shock angle, while the
        # subsonic test above passes gamma -- confirm the intended signature.
        assert osr.calc_mach_normal_ahead_shock(1.5, radians(53.61)) == approx(1.2075, rel=1e-4)

    def test_supersonic_calc_mach_from_mach_normal_1(self):
        # Inverse of the previous computation: recover M1 from Mn1 and beta.
        assert osr.calc_mach_ahead_shock_from_mach_normal_ahead_shock(1.207496034, radians(53.61)) == approx(
            1.5, rel=1e-1
        )

    def test_supersonic_calc_beta_from_mach_normal_1(self):
        assert osr.calc_beta_from_mach_mach_normal_ahead_shock(1.5, 1.25340) == approx(
            radians(56.67868), rel=1e-4
        )

    def test_supersonic_calc_mach_2(self):
        assert osr.calc_mach_behind_shock(0.83754, radians(8.56287), radians(53.61)) == approx(
            1.18349, rel=1e-4
        )

    def test_supersonic_calc_mn2_from_m2(self):
        assert osr.calc_mach_normal_behind_shock_from_mach_behind_shock(
            1.11438, radians(56.67868), radians(10.0)
        ) == approx(0.81073, rel=1e-4)

    def test_supersonic_calc_theta_from_beta_mach(self):
        assert osr.calc_theta_from_theta_beta_mach(radians(56.67868), 1.5, self.gamma) == approx(
            radians(10.0), rel=1e-1
        )

    def test_supersonic_calc_beta_from_theta_mach_weak(self):
        # Weak-shock branch of the theta-beta-Mach relation.
        assert osr.calc_beta_from_theta_beta_mach_weak(radians(10.0), 1.5, self.gamma) == approx(
            radians(56.67868), rel=1e-4
        )

    def test_supersonic_calc_beta_from_theta_mach_strong(self):
        # Strong-shock branch of the theta-beta-Mach relation.
        assert osr.calc_beta_from_theta_beta_mach_strong(radians(10.0), 1.5, self.gamma) == approx(
            radians(75.99487), rel=1e-4
        )

    def test_supersonic_calc_mach_from_theta_beta(self):
        assert osr.calc_mach_from_theta_beta_mach(radians(56.67868), radians(10.0), self.gamma) == approx(
            1.5, rel=1e-4
        )

    def test_supersonic_max_flow_deflection_angle(self):
        assert osr.calc_max_flow_deflection_angle(radians(66.5888), 1.5, self.gamma) == approx(
            radians(12.11267), rel=1e-4
        )

    def test_supersonic_max_shock_angle(self):
        assert osr.calc_max_shock_angle(1.5, self.gamma) == approx(radians(66.5888), rel=1e-4)

    def test_supersonic_calc_mach_wave_angle(self):
        assert osr.calc_mach_wave_angle(1.5) == approx(radians(41.8103149), rel=1e-5)

    def test_supersonic_calc_mach_from_mach_wave_angle(self):
        assert osr.calc_mach_from_mach_wave_angle(radians(41.8103149)) == approx(1.5, rel=1e-1)
# TODO: Figure out someway to test the plotting feature of the TBM chart
###########################################################################################################
# Test the class construction
###########################################################################################################
class TestObliqueShockRelationsClass:
    """Construction tests for ObliqueShockRelations (``osr``).

    Every keyword combination that pins down the same physical shock must
    yield identical derived quantities, so the repeated assertion groups are
    factored into two private helpers (M=1.5 and M=3.8 weak-shock states).

    BUG FIX: ``test_construction_from_shock_angle_wedge_angle`` was defined
    twice; the second definition shadowed the first, so the M=1.5 variant
    never ran. The second occurrence is renamed (``..._no_m2``) so both
    cases execute.
    """

    gamma = 1.4  # ratio of specific heats used throughout

    def _assert_weak_shock_m1_5(self, inst):
        # Expected state for a M=1.5 weak shock off a 10 deg wedge
        # (beta = 56.67868 deg). All tolerances rel=1e-4 except the loose
        # gamma/mach round-trip checks.
        assert inst.shockAngle == approx(radians(56.67868), rel=1e-4)
        assert inst.wedgeAngle == approx(radians(10.0), rel=1e-4)
        assert inst.machNorm1 == approx(1.25340, rel=1e-4)
        assert inst.machNorm2 == approx(0.81073, rel=1e-4)
        assert inst.gamma == approx(self.gamma, rel=1e-1)
        assert inst.mach == approx(1.5, rel=1e-1)
        assert inst.p2_p1 == approx(1.66619, rel=1e-4)
        assert inst.rho2_rho1 == approx(1.43450, rel=1e-4)
        assert inst.t2_t1 == approx(1.16151, rel=1e-4)
        assert inst.po2_po1 == approx(0.98660, rel=1e-4)
        assert inst.po2_p1 == approx(2.5672, rel=1e-4)
        assert inst.mach2 == approx(1.11438, rel=1e-4)
        assert inst.shockType == ShockType.WEAK

    def _assert_weak_shock_m3_8(self, inst):
        # Expected state for a M=3.8 weak shock off a 10 deg wedge
        # (beta = 23.01624 deg).
        assert inst.shockAngle == approx(radians(23.01624), rel=1e-4)
        assert inst.wedgeAngle == approx(radians(10.0), rel=1e-4)
        assert inst.machNorm1 == approx(1.48577, rel=1e-4)
        assert inst.machNorm2 == approx(0.70619, rel=1e-4)
        assert inst.gamma == approx(self.gamma, rel=1e-1)
        assert inst.mach == approx(3.8, rel=1e-1)
        assert inst.p2_p1 == approx(2.40876, rel=1e-4)
        assert inst.rho2_rho1 == approx(1.83768, rel=1e-4)
        assert inst.t2_t1 == approx(1.31077, rel=1e-4)
        assert inst.po2_po1 == approx(0.93423, rel=1e-4)
        assert inst.po2_p1 == approx(3.35977, rel=1e-4)
        assert inst.mach2 == approx(3.13545, rel=1e-4)
        assert inst.shockType == ShockType.WEAK

    def test_construction_from_mach_flow_deflection(self):
        self._assert_weak_shock_m1_5(osr(self.gamma, mach=1.5, wedgeAngle=10.0))

    def test_construction_from_mach_norm1_shock_angle(self):
        self._assert_weak_shock_m1_5(osr(self.gamma, mn1=1.2534, shockAngle=56.67868))

    def test_construction_from_shock_angle_wedge_angle(self):
        self._assert_weak_shock_m1_5(
            osr(self.gamma, wedgeAngle=10.0, shockAngle=56.67868)
        )

    def test_construction_from_shock_angle_wedge_angle_m2_fail(self):
        # Supplying an m2 inconsistent with the wedge/shock pair must raise.
        with pytest.raises(ValueError):
            osr(self.gamma, wedgeAngle=10.0, shockAngle=56.67868, m2=2.5672)

    def test_construction_from_shock_angle_wedge_angle_m2(self):
        self._assert_weak_shock_m3_8(
            osr(self.gamma, wedgeAngle=10.0, shockAngle=23.01624, m2=3.13545)
        )

    def test_construction_from_shock_angle_wedge_angle_no_m2(self):
        # Renamed from test_construction_from_shock_angle_wedge_angle to stop
        # shadowing the M=1.5 variant above (see class docstring).
        self._assert_weak_shock_m3_8(
            osr(self.gamma, wedgeAngle=10.0, shockAngle=23.01624)
        )

    def test_construction_from_po2_p1(self):
        self._assert_weak_shock_m3_8(osr(self.gamma, po2_p1=3.35977, shockAngle=23.01624))

    def test_construction_from_p2_p1(self):
        self._assert_weak_shock_m3_8(osr(self.gamma, p2_p1=2.40876, shockAngle=23.01624))

    def test_construction_from_rho2_rho1(self):
        self._assert_weak_shock_m3_8(osr(self.gamma, rho2_rho1=1.83768, shockAngle=23.01624))

    def test_construction_from_t2_t1(self):
        self._assert_weak_shock_m3_8(osr(self.gamma, t2_t1=1.31077, shockAngle=23.01624))

    def test_construction_from_po2_po1(self):
        self._assert_weak_shock_m3_8(osr(self.gamma, po2_po1=0.93423, shockAngle=23.01624))

    def test_construction_from_mn2_shock_angle(self):
        self._assert_weak_shock_m3_8(osr(self.gamma, mn2=0.70619, shockAngle=23.01624))

    def test_construction_not_enough_args(self):
        # gamma alone does not determine a shock.
        with pytest.raises(InvalidOptionCombinationError):
            osr(self.gamma)
| 50.392727
| 109
| 0.628374
| 2,080
| 13,858
| 4.053846
| 0.076442
| 0.085389
| 0.083966
| 0.155123
| 0.856499
| 0.823529
| 0.775735
| 0.745256
| 0.719521
| 0.681807
| 0
| 0.13207
| 0.203926
| 13,858
| 274
| 110
| 50.576642
| 0.632252
| 0.032977
| 0
| 0.660793
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00365
| 0.687225
| 1
| 0.118943
| false
| 0
| 0.022026
| 0
| 0.15859
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bb638509ed8243e8ff6ac5a079efd46e74da6470
| 22,215
|
py
|
Python
|
tests/test_dump_to_ckan.py
|
OriHoch/datapackage-pipelines-ckan
|
7783eed00ce1bb2e8f5e689df5232a72f9e6a1c1
|
[
"MIT"
] | null | null | null |
tests/test_dump_to_ckan.py
|
OriHoch/datapackage-pipelines-ckan
|
7783eed00ce1bb2e8f5e689df5232a72f9e6a1c1
|
[
"MIT"
] | 9
|
2017-10-04T10:09:13.000Z
|
2019-04-15T12:47:12.000Z
|
tests/test_dump_to_ckan.py
|
OriHoch/datapackage-pipelines-ckan
|
7783eed00ce1bb2e8f5e689df5232a72f9e6a1c1
|
[
"MIT"
] | 1
|
2018-09-13T13:16:06.000Z
|
2018-09-13T13:16:06.000Z
|
import importlib
import io
import json
import os
import unittest
import requests_mock
import mock
from datapackage_pipelines.wrapper.input_processor import ResourceIterator
import datapackage_pipelines_ckan.processors
import logging
log = logging.getLogger(__name__)
@mock.patch('datapackage_pipelines.lib.dump.dumper_base.ingest')
@mock.patch('datapackage_pipelines.lib.dump.dumper_base.spew')
def mock_dump_test(processor, ingest_tuple, mock_spew, mock_ingest):
    """Run *processor* as the pipeline framework would, with mocked I/O.

    ``ingest`` is patched to hand back *ingest_tuple*; ``spew`` is patched
    to record its arguments. The recorded ``call_args`` of ``spew`` is
    returned so callers can inspect what the processor emitted.
    """
    mock_ingest.return_value = ingest_tuple
    # Execute the processor file as a stand-alone '__main__' module, the
    # same way the pipeline runner invokes it.
    spec = importlib.util.spec_from_file_location('__main__', processor)
    processor_module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(processor_module)
    # Running the module invoked the patched `spew`; expose its call args.
    return mock_spew.call_args
class TestDumpToCkanProcessor(unittest.TestCase):
    """Tests for the ``dump/to_ckan.py`` processor.

    Each test mocks the CKAN action API with ``requests_mock``, feeds the
    processor a datapackage through the mocked ``ingest`` (see
    ``mock_dump_test``) and then inspects both the resource iterator
    passed to ``spew`` and the recorded HTTP request history.
    """

    @requests_mock.mock()
    def test_dump_to_ckan_no_resources(self, mock_request):
        """Package create succeeds for a datapackage with no resources:
        exactly one API call, empty spewed resource iterator."""
        base_url = 'https://demo.ckan.org/api/3/action/'
        package_create_url = '{}package_create'.format(base_url)
        mock_request.post(package_create_url,
                          json={
                              'success': True,
                              'result': {'id': 'ckan-package-id'}})
        # input arguments used by our mock `ingest`
        datapackage = {
            'name': 'my-datapackage',
            'project': 'my-project',
            'resources': []
        }
        params = {
            'ckan-host': 'https://demo.ckan.org',
            'ckan-api-key': 'my-api-key',
            'dataset-properties': {
                'extra_prop': 'hi'
            }
        }
        # Path to the processor we want to test
        processor_dir = \
            os.path.dirname(datapackage_pipelines_ckan.processors.__file__)
        processor_path = os.path.join(processor_dir, 'dump/to_ckan.py')
        # Trigger the processor with our mock `ingest` and capture what it
        # returned to `spew`.
        spew_args, _ = mock_dump_test(processor_path,
                                      (params, datapackage, []))
        spew_res_iter = spew_args[1]
        assert list(spew_res_iter) == []
        requests = mock_request.request_history
        assert len(requests) == 1
        assert requests[0].url == package_create_url

    @requests_mock.mock()
    def test_dump_to_ckan_package_create_error(self, mock_request):
        '''Create failed due to existing package, no overwrite so raise
        exception'''
        base_url = 'https://demo.ckan.org/api/3/action/'
        package_create_url = '{}package_create'.format(base_url)
        mock_request.post(package_create_url,
                          json={
                              'success': False,
                              'error': {"__type": "Validation Error",
                                        "name": ["That URL is already in use."]}
                          })
        # input arguments used by our mock `ingest`
        datapackage = {
            'name': 'my-datapackage',
            'project': 'my-project',
            'resources': []
        }
        params = {
            'ckan-host': 'https://demo.ckan.org',
            'ckan-api-key': 'my-api-key'
        }
        # Path to the processor we want to test
        processor_dir = \
            os.path.dirname(datapackage_pipelines_ckan.processors.__file__)
        processor_path = os.path.join(processor_dir, 'dump/to_ckan.py')
        # Trigger the processor with our mock `ingest` and capture what it
        # returned to `spew`.
        spew_args, _ = mock_dump_test(processor_path,
                                      (params, datapackage, []))
        spew_res_iter = spew_args[1]
        # The failure surfaces lazily, when the spewed iterator is consumed.
        with self.assertRaises(Exception):
            list(spew_res_iter)
        requests = mock_request.request_history
        assert len(requests) == 1
        assert requests[0].url == package_create_url

    @requests_mock.mock()
    def test_dump_to_ckan_package_create_error_overwrite(self, mock_request):
        '''Create failed due to existing package, overwrite so update existing
        package.'''
        base_url = 'https://demo.ckan.org/api/3/action/'
        package_create_url = '{}package_create'.format(base_url)
        package_update_url = '{}package_update'.format(base_url)
        mock_request.post(package_create_url,
                          json={
                              'success': False,
                              'error': {"__type": "Validation Error",
                                        "name": ["That URL is already in use."]}
                          })
        mock_request.post(package_update_url,
                          json={
                              'success': True,
                              'result': {'id': 'ckan-package-id'}})
        # input arguments used by our mock `ingest`
        datapackage = {
            'name': 'my-datapackage',
            'project': 'my-project',
            'resources': []
        }
        params = {
            'ckan-host': 'https://demo.ckan.org',
            'ckan-api-key': 'my-api-key',
            'overwrite_existing': True
        }
        # Path to the processor we want to test
        processor_dir = \
            os.path.dirname(datapackage_pipelines_ckan.processors.__file__)
        processor_path = os.path.join(processor_dir, 'dump/to_ckan.py')
        # Trigger the processor with our mock `ingest` and capture what it
        # returned to `spew`.
        spew_args, _ = mock_dump_test(processor_path,
                                      (params, datapackage, []))
        spew_res_iter = spew_args[1]
        assert list(spew_res_iter) == []
        # Failed create must be followed by a package_update call.
        requests = mock_request.request_history
        assert len(requests) == 2
        assert requests[0].url == package_create_url
        assert requests[1].url == package_update_url

    @requests_mock.mock()
    def test_dump_to_ckan_package_create_resources(self, mock_request):
        '''Create package with non-streaming resources.'''
        base_url = 'https://demo.ckan.org/api/3/action/'
        package_create_url = '{}package_create'.format(base_url)
        resource_create_url = '{}resource_create'.format(base_url)
        mock_request.post(package_create_url,
                          json={
                              'success': True,
                              'result': {'id': 'ckan-package-id'}})
        mock_request.post(resource_create_url,
                          json={
                              'success': True,
                              'result': {'id': 'ckan-resource-id'}})
        # input arguments used by our mock `ingest`
        datapackage = {
            'name': 'my-datapackage',
            'project': 'my-project',
            'resources': [{
                "dpp:streamedFrom": "https://example.com/file.csv",
                "name": "resource_not_streamed",
                "path": ".",
                "format": "csv"
            }, {
                "dpp:streamedFrom": "https://example.com/file_02.csv",
                "name": "resource_not_streamed_02",
                "path": "."
            }]
        }
        params = {
            'ckan-host': 'https://demo.ckan.org',
            'ckan-api-key': 'my-api-key',
            'overwrite_existing': True
        }
        # Path to the processor we want to test
        processor_dir = \
            os.path.dirname(datapackage_pipelines_ckan.processors.__file__)
        processor_path = os.path.join(processor_dir, 'dump/to_ckan.py')
        # Trigger the processor with our mock `ingest` and capture what it
        # returned to `spew`.
        spew_args, _ = mock_dump_test(processor_path,
                                      (params, datapackage, []))
        spew_res_iter = spew_args[1]
        assert list(spew_res_iter) == []
        # One package_create plus one resource_create per resource.
        requests = mock_request.request_history
        assert len(requests) == 3
        assert requests[0].url == package_create_url
        assert requests[1].url == resource_create_url
        assert requests[2].url == resource_create_url

    @requests_mock.mock()
    def test_dump_to_ckan_package_create_streaming_resource(self,
                                                            mock_request):
        '''Create package with streaming resource.'''
        base_url = 'https://demo.ckan.org/api/3/action/'
        package_create_url = '{}package_create'.format(base_url)
        resource_create_url = '{}resource_create'.format(base_url)
        mock_request.post(package_create_url,
                          json={
                              'success': True,
                              'result': {'id': 'ckan-package-id'}})
        mock_request.post(resource_create_url,
                          json={
                              'success': True,
                              'result': {'id': 'ckan-resource-id'}})
        # input arguments used by our mock `ingest`
        datapackage = {
            'name': 'my-datapackage',
            'project': 'my-project',
            'resources': [{
                "dpp:streamedFrom": "https://example.com/file.csv",
                "dpp:streaming": True,
                "name": "resource_streamed.csv",
                "path": "data/file.csv",
                'schema': {'fields': [
                    {'name': 'first', 'type': 'string'},
                    {'name': 'last', 'type': 'string'}
                ]}
            }, {
                "dpp:streamedFrom": "https://example.com/file_02.csv",
                "name": "resource_not_streamed.csv",
                "path": "."
            }]
        }
        params = {
            'ckan-host': 'https://demo.ckan.org',
            'ckan-api-key': 'my-api-key',
            'overwrite_existing': True,
            'force-format': True
        }
        # Path to the processor we want to test
        processor_dir = \
            os.path.dirname(datapackage_pipelines_ckan.processors.__file__)
        processor_path = os.path.join(processor_dir, 'dump/to_ckan.py')
        # Trigger the processor with our mock `ingest` and capture what it
        # returned to `spew`.
        json_file = {'first': 'Fred', 'last': 'Smith'}
        json_file = json.dumps(json_file)
        spew_args, _ = mock_dump_test(
            processor_path,
            (params, datapackage,
             iter([ResourceIterator(io.StringIO(json_file),
                                    datapackage['resources'][0],
                                    {'schema': {'fields': []}})
                   ])))
        spew_res_iter = spew_args[1]
        for r in spew_res_iter:
            list(r)  # iterate the row to yield it
        requests = mock_request.request_history
        assert len(requests) == 3
        assert requests[0].url == package_create_url
        assert requests[1].url == resource_create_url
        assert requests[2].url == resource_create_url

    @requests_mock.mock()
    def test_dump_to_ckan_package_create_streaming_resource_fail(self,
                                                                 mock_request):
        '''Create package with streaming resource, which failed to create
        resource.'''
        base_url = 'https://demo.ckan.org/api/3/action/'
        package_create_url = '{}package_create'.format(base_url)
        resource_create_url = '{}resource_create'.format(base_url)
        mock_request.post(package_create_url,
                          json={
                              'success': True,
                              'result': {'id': 'ckan-package-id'}})
        mock_request.post(resource_create_url,
                          json={
                              'success': False,
                              'error': {"__type": "Validation Error",
                                        "name": ["Some validation error."]}
                          })
        # input arguments used by our mock `ingest`
        datapackage = {
            'name': 'my-datapackage',
            'project': 'my-project',
            'resources': [{
                "dpp:streamedFrom": "https://example.com/file.csv",
                "dpp:streaming": True,
                "name": "resource_streamed.csv",
                "path": "data/file.csv",
                'schema': {'fields': [
                    {'name': 'first', 'type': 'string'},
                    {'name': 'last', 'type': 'string'}
                ]}
            }, {
                "dpp:streamedFrom": "https://example.com/file_02.csv",
                "name": "resource_not_streamed.csv",
                "path": "."
            }]
        }
        params = {
            'ckan-host': 'https://demo.ckan.org',
            'ckan-api-key': 'my-api-key',
            'overwrite_existing': True,
            'force-format': True
        }
        # Path to the processor we want to test
        processor_dir = \
            os.path.dirname(datapackage_pipelines_ckan.processors.__file__)
        processor_path = os.path.join(processor_dir, 'dump/to_ckan.py')
        # Trigger the processor with our mock `ingest` and capture what it
        # returned to `spew`.
        json_file = {'first': 'Fred', 'last': 'Smith'}
        json_file = json.dumps(json_file)
        spew_args, _ = mock_dump_test(
            processor_path,
            (params, datapackage,
             iter([ResourceIterator(io.StringIO(json_file),
                                    datapackage['resources'][0],
                                    {'schema': {'fields': []}})
                   ])))
        spew_res_iter = spew_args[1]
        # The resource_create failure surfaces while the rows are consumed.
        with self.assertRaises(Exception):
            for r in spew_res_iter:
                list(r)  # iterate the row to yield it

    @requests_mock.mock()
    def test_dump_to_ckan_package_create_streaming_resource_datastore(self, mock_request):  # noqa
        '''Create package with streaming resource, and pushing to datastore.'''
        package_id = 'ckan-package-id'
        base_url = 'https://demo.ckan.org/api/3/action/'
        package_create_url = '{}package_create'.format(base_url)
        resource_create_url = '{}resource_create'.format(base_url)
        package_show_url = '{}package_show?id={}'.format(base_url, package_id)
        datastore_search_url = \
            '{}datastore_search?resource_id=_table_metadata'.format(base_url)
        datastore_create_url = '{}datastore_create'.format(base_url)
        datastore_upsert_url = '{}datastore_upsert'.format(base_url)
        mock_request.post(package_create_url,
                          json={
                              'success': True,
                              'result': {'id': package_id}})
        mock_request.post(resource_create_url,
                          json={
                              'success': True,
                              'result': {'id': 'ckan-resource-id'}})
        mock_request.get(package_show_url,
                         json={
                             'success': True,
                             'result': {
                                 'id': '7766839b-face-4336-8e1a-3c51c5e7634d',
                                 'resources': [
                                     {
                                         'name': 'co2-mm-mlo_csv_not_streamed',
                                         'format': 'CSV',
                                         'url': 'https://pkgstore.datahub.io/core/co2-ppm:co2-mm-mlo_csv/data/co2-mm-mlo_csv.csv',
                                         'datastore_active': False,
                                         'cache_last_updated': None,
                                         'package_id': '7766839b-face-4336-8e1a-3c51c5e7634d',
                                         'id': '329e4271-8cc3-48c9-a219-c8eab52acc65',
                                     }, {
                                         'name': 'co2-mm-mlo_csv_streamed',
                                         'encoding': 'utf-8',
                                         'url': 'https://demo.ckan.org/dataset/7766839b-face-4336-8e1a-3c51c5e7634d/resource/723380d7-688a-465f-b0bd-ff6d1ec25680/download/co2-mm-mlo_csv_streamed.csv',
                                         'datastore_active': False,
                                         'format': 'CSV',
                                         'package_id': '7766839b-face-4336-8e1a-3c51c5e7634d',
                                         'id': '723380d7-688a-465f-b0bd-ff6d1ec25680',
                                     }
                                 ],
                                 'num_resources': 2,
                                 'name': 'test-dataset-010203',
                                 'title': 'Test Dataset'
                             }
                         })
        mock_request.get(datastore_search_url,
                         json={
                             'success': True,
                             'result': {
                                 'resource_id': '_table_metadata',
                                 'records': []
                             }})
        mock_request.post(datastore_create_url,
                          json={
                              'success': True,
                              'result': {
                                  'resource_id': '7564690e-86ec-44de-a3f5-2cff9cbb521f'
                              }
                          })
        mock_request.post(datastore_upsert_url,
                          json={
                              'success': True
                          })
        # input arguments used by our mock `ingest`
        datapackage = {
            'name': 'my-datapackage',
            'project': 'my-project',
            'resources': [{
                "dpp:streamedFrom": "https://example.com/file.csv",
                "dpp:streaming": True,
                "name": "resource_streamed.csv",
                "path": "data/file.csv",
                'schema': {'fields': [
                    {'name': 'first', 'type': 'string'},
                    {'name': 'last', 'type': 'string'}
                ]}
            }, {
                "dpp:streamedFrom": "https://example.com/file_02.csv",
                "name": "resource_not_streamed.csv",
                "path": "."
            }]
        }
        params = {
            'ckan-host': 'https://demo.ckan.org',
            'ckan-api-key': 'my-api-key',
            'overwrite_existing': True,
            'force-format': True,
            'push_resources_to_datastore': True
        }
        # Path to the processor we want to test
        processor_dir = \
            os.path.dirname(datapackage_pipelines_ckan.processors.__file__)
        processor_path = os.path.join(processor_dir, 'dump/to_ckan.py')
        # Trigger the processor with our mock `ingest` and capture what it
        # returned to `spew`.
        json_file = {'first': 'Fred', 'last': 'Smith'}
        json_file = json.dumps(json_file)
        spew_args, _ = mock_dump_test(
            processor_path,
            (params, datapackage,
             iter([ResourceIterator(io.StringIO(json_file),
                                    datapackage['resources'][0],
                                    {'schema': {'fields': []}})
                   ])))
        spew_res_iter = spew_args[1]
        for r in spew_res_iter:
            list(r)  # iterate the row to yield it
        # Full expected call sequence: create package, create both
        # resources, inspect the package, then datastore search/create/upsert.
        requests = mock_request.request_history
        assert len(requests) == 7
        assert requests[0].url == package_create_url
        assert requests[1].url == resource_create_url
        assert requests[2].url == resource_create_url
        assert requests[3].url == package_show_url
        assert requests[4].url.startswith(datastore_search_url)
        assert requests[5].url == datastore_create_url
        assert requests[6].url == datastore_upsert_url

    @requests_mock.mock()
    def test_dump_to_ckan_package_create_streaming_resource_datastore_method_invalid(self, mock_request):  # noqa
        '''Create package with streaming resource, and pushing to datastore,
        with an invalid method.'''
        # input arguments used by our mock `ingest`
        datapackage = {
            'name': 'my-datapackage',
            'project': 'my-project',
            'resources': [{
                "dpp:streamedFrom": "https://example.com/file.csv",
                "dpp:streaming": True,
                "name": "resource_streamed.csv",
                "path": "data/file.csv",
                'schema': {'fields': [
                    {'name': 'first', 'type': 'string'},
                    {'name': 'last', 'type': 'string'}
                ]}
            }, {
                "dpp:streamedFrom": "https://example.com/file_02.csv",
                "name": "resource_not_streamed.csv",
                "path": "."
            }]
        }
        params = {
            'ckan-host': 'https://demo.ckan.org',
            'ckan-api-key': 'my-api-key',
            'overwrite_existing': True,
            'force-format': True,
            'push_resources_to_datastore': True,
            'push_resources_to_datastore_method': 'invalid'
        }
        # Path to the processor we want to test
        processor_dir = \
            os.path.dirname(datapackage_pipelines_ckan.processors.__file__)
        processor_path = os.path.join(processor_dir, 'dump/to_ckan.py')
        # Trigger the processor with our mock `ingest` and capture what it
        # returned to `spew`.
        json_file = {'first': 'Fred', 'last': 'Smith'}
        json_file = json.dumps(json_file)
        # The invalid datastore method is rejected eagerly, before any
        # HTTP request is issued.
        with self.assertRaises(RuntimeError):
            spew_args, _ = mock_dump_test(
                processor_path,
                (params, datapackage,
                 iter([ResourceIterator(io.StringIO(json_file),
                                        datapackage['resources'][0],
                                        {'schema': {'fields': []}})
                       ])))
| 40.317604
| 199
| 0.509115
| 2,158
| 22,215
| 5.000927
| 0.101946
| 0.030856
| 0.029652
| 0.023721
| 0.827557
| 0.805967
| 0.795775
| 0.789474
| 0.761583
| 0.752873
| 0
| 0.016651
| 0.375512
| 22,215
| 550
| 200
| 40.390909
| 0.761263
| 0.096061
| 0
| 0.761574
| 0
| 0.00463
| 0.214425
| 0.036314
| 0
| 0
| 0
| 0
| 0.06713
| 1
| 0.020833
| false
| 0
| 0.027778
| 0
| 0.053241
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bb9fb99aed99ebb3c17de317761d0f1013b4385c
| 4,543
|
py
|
Python
|
pyaz/billing/profile/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/billing/profile/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/billing/profile/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | 1
|
2022-02-03T09:12:01.000Z
|
2022-02-03T09:12:01.000Z
|
'''
Manage billing profile of billing account
'''
from ... pyaz_utils import _call_az
def list(account_name, expand=None):
    '''
    List the billing profiles that a user has access to. The operation is supported for billing accounts with agreement type Microsoft Customer Agreement or Microsoft Partner Agreement.

    Required Parameters:
    - account_name -- The ID that uniquely identifies a billing account.

    Optional Parameters:
    - expand -- May be used to expand the invoice sections.
    '''
    # NOTE: `locals()` is forwarded verbatim as the CLI arguments, so no
    # local variables may be introduced before this call. The name `list`
    # intentionally shadows the builtin to mirror the az subcommand.
    return _call_az("az billing profile list", locals())
def show(account_name, name, expand=None):
    '''
    Get a billing profile by its ID. The operation is supported for billing accounts with agreement type Microsoft Customer Agreement or Microsoft Partner Agreement.

    Required Parameters:
    - account_name -- The ID that uniquely identifies a billing account.
    - name -- The ID that uniquely identifies a billing profile.

    Optional Parameters:
    - expand -- May be used to expand the invoice sections.
    '''
    # `locals()` carries the parameters to the CLI; do not add locals above.
    return _call_az("az billing profile show", locals())
def create(account_name, name, bill_to=None, display_name=None, enabled_azure_plans=None, invoice_email_opt_in=None, invoice_sections_value=None, no_wait=None, po_number=None):
    '''
    Creates or updates a billing profile. The operation is supported for billing accounts with agreement type Microsoft Customer Agreement or Microsoft Partner Agreement.

    Required Parameters:
    - account_name -- The ID that uniquely identifies a billing account.
    - name -- The ID that uniquely identifies a billing profile.

    Optional Parameters:
    - bill_to -- Billing address.
    - display_name -- The name of the billing profile.
    - enabled_azure_plans -- Information about the enabled azure plans.
    - invoice_email_opt_in -- Flag controlling whether the invoices for the billing profile are sent through email.
    - invoice_sections_value -- The invoice sections associated to the billing profile. Expected value: json-string/@json-file.
    - no_wait -- Do not wait for the long-running operation to finish.
    - po_number -- The purchase order name that will appear on the invoices generated for the billing profile.
    '''
    # `locals()` carries the parameters to the CLI; do not add locals above.
    return _call_az("az billing profile create", locals())
def update(account_name, name, bill_to=None, display_name=None, enabled_azure_plans=None, invoice_email_opt_in=None, invoice_sections_value=None, no_wait=None, po_number=None):
    '''
    Creates or updates a billing profile. The operation is supported for billing accounts with agreement type Microsoft Customer Agreement or Microsoft Partner Agreement.

    Required Parameters:
    - account_name -- The ID that uniquely identifies a billing account.
    - name -- The ID that uniquely identifies a billing profile.

    Optional Parameters:
    - bill_to -- Billing address.
    - display_name -- The name of the billing profile.
    - enabled_azure_plans -- Information about the enabled azure plans.
    - invoice_email_opt_in -- Flag controlling whether the invoices for the billing profile are sent through email.
    - invoice_sections_value -- The invoice sections associated to the billing profile. Expected value: json-string/@json-file.
    - no_wait -- Do not wait for the long-running operation to finish.
    - po_number -- The purchase order name that will appear on the invoices generated for the billing profile.
    '''
    # `locals()` carries the parameters to the CLI; do not add locals above.
    return _call_az("az billing profile update", locals())
def wait(account_name, name, created=None, custom=None, deleted=None, exists=None, expand=None, interval=None, timeout=None, updated=None):
    '''
    Place the CLI in a waiting state until a condition of the billing profile is met.

    Required Parameters:
    - account_name -- The ID that uniquely identifies a billing account.
    - name -- The ID that uniquely identifies a billing profile.

    Optional Parameters:
    - created -- wait until created with 'provisioningState' at 'Succeeded'
    - custom -- Wait until the condition satisfies a custom JMESPath query. E.g. provisioningState!='InProgress', instanceView.statuses[?code=='PowerState/running']
    - deleted -- wait until deleted
    - exists -- wait until the resource exists
    - expand -- May be used to expand the invoice sections.
    - interval -- polling interval in seconds
    - timeout -- maximum wait in seconds
    - updated -- wait until updated with provisioningState at 'Succeeded'
    '''
    # `locals()` carries the parameters to the CLI; do not add locals above.
    return _call_az("az billing profile wait", locals())
| 48.849462
| 185
| 0.74048
| 615
| 4,543
| 5.360976
| 0.206504
| 0.093418
| 0.038217
| 0.043676
| 0.762511
| 0.762511
| 0.754019
| 0.754019
| 0.754019
| 0.741583
| 0
| 0
| 0.188642
| 4,543
| 92
| 186
| 49.380435
| 0.894466
| 0.738059
| 0
| 0
| 0
| 0
| 0.127409
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.454545
| false
| 0
| 0.090909
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
bbbf6c41bb7fb4ae28bc9212c9f92225f27f15ff
| 105,489
|
py
|
Python
|
kuaizi/fitting.py
|
AstroJacobLi/kuaizi
|
065fc45223bcf497e90b38f25c186654daf3c29d
|
[
"MIT"
] | 5
|
2021-06-03T06:45:57.000Z
|
2022-03-30T18:50:29.000Z
|
kuaizi/fitting.py
|
AstroJacobLi/kuaizi
|
065fc45223bcf497e90b38f25c186654daf3c29d
|
[
"MIT"
] | 4
|
2021-03-19T08:05:39.000Z
|
2022-03-23T18:04:16.000Z
|
kuaizi/fitting.py
|
AstroJacobLi/kuaizi
|
065fc45223bcf497e90b38f25c186654daf3c29d
|
[
"MIT"
] | 1
|
2021-06-03T06:45:59.000Z
|
2021-06-03T06:45:59.000Z
|
# Import packages
import os
import sys
import pickle
import dill
import time
import copy
import astropy.units as u
import matplotlib.pyplot as plt
import numpy as np
import scarlet
import sep
from astropy import wcs
from astropy.convolution import Box2DKernel, Gaussian2DKernel, convolve
from astropy.coordinates import SkyCoord, match_coordinates_sky
from astropy.io import fits
from astropy.table import Column, Table
from astropy.utils.data import clear_download_cache, download_file
from IPython.display import clear_output
# Initialize `unagi`
# from unagi import config, hsc, plotting
# from unagi.task import hsc_cutout, hsc_psf
# Import kuaizi
import kuaizi as kz
from kuaizi import HSC_pixel_scale, HSC_zeropoint
from kuaizi.detection import Data
from kuaizi.display import SEG_CMAP, display_single
sys.setrecursionlimit(10000)
plt.rcParams['font.size'] = 15
plt.rc('image', cmap='inferno', interpolation='none', origin='lower')
def _fitting_single_comp(lsbg, hsc_dr, cutout_halfsize=1.0, prefix='LSBG', large_away_factor=3.0, compact_away_factor=0.4):
    """Fit a scarlet model for a single LSBG candidate.

    Downloads (or loads cached) HSC `griz` cutouts and PSFs, builds masks
    for GAIA bright stars, compact high-frequency sources and large bright
    contaminants, initializes a one- or two-component scarlet source at the
    target position, optimizes the blend at progressively tighter `e_rel`,
    and writes the trained model to ./Models and figures to ./Figures.

    Parameters:
    - lsbg -- catalog row with 'Seq', 'RAJ2000', 'DEJ2000' columns.
    - hsc_dr -- HSC archive handle passed to `hsc_cutout` / `hsc_psf`.
      NOTE(review): `hsc_cutout` and `hsc_psf` come from `unagi.task`,
      whose import is commented out at module top -- this function raises
      NameError unless they are provided elsewhere; confirm.
    - cutout_halfsize -- half-size of the cutout in arcmin.
    - prefix -- filename prefix for all saved products.
    - large_away_factor -- scale of the ellipse around the target inside
      which large detections are NOT masked.
    - compact_away_factor -- radius factor (in units of the target FWHM)
      inside which compact sources are NOT masked.

    Returns the `scarlet.Blend` object (also returned if fitting raised,
    via the broad except at the end).
    """
    from kuaizi.utils import padding_PSF
    kz.utils.set_env(project='HSC', name='HSC_LSBG')
    # kz.utils.set_matplotlib(usetex=False, fontsize=15)
    index = lsbg['Seq']
    lsbg_coord = SkyCoord(ra=lsbg['RAJ2000'], dec=lsbg['DEJ2000'], unit='deg')

    # Output directories for cached cutouts and PSFs.
    if not os.path.isdir('./Images'):
        os.mkdir('./Images')
    if not os.path.isdir('./PSFs'):
        os.mkdir('./PSFs')

    size_ang = cutout_halfsize * u.arcmin
    channels = 'griz'
    cutout = hsc_cutout(
        lsbg_coord,
        cutout_size=size_ang,
        filters=channels,
        mask=True,
        variance=True,
        archive=hsc_dr,
        use_saved=True,
        output_dir='./Images/',
        prefix=f'{prefix}_{index:04d}_img',
        save_output=True)
    psf_list = hsc_psf(
        lsbg_coord,
        centered=True,
        filters=channels,
        img_type='coadd',
        verbose=True,
        archive=hsc_dr,
        save_output=True,
        use_saved=True,
        prefix=f'{prefix}_{index:04d}_psf',
        output_dir='./PSFs/')

    channels_list = list(channels)

    # Reconstructure data
    # assumes each cutout HDU list has image in [1] and variance in [3]
    # -- TODO confirm against hsc_cutout output layout.
    images = np.array([hdu[1].data for hdu in cutout])
    w = wcs.WCS(cutout[0][1].header)  # note: all bands share the same WCS here
    filters = channels_list
    weights = 1 / np.array([hdu[3].data for hdu in cutout])
    psf_pad = padding_PSF(psf_list)  # Padding PSF cutouts from HSC
    psfs = scarlet.ImagePSF(np.array(psf_pad))
    data = Data(images=images, weights=weights,
                wcs=w, psfs=psfs, channels=channels)

    _, msk_star = kz.utils.gaia_star_mask(  # Generate a mask for GAIA bright stars
        data.images.mean(axis=0),  # averaged image
        w,
        pixel_scale=HSC_pixel_scale,
        gaia_bright=19.5,
        mask_a=694.7,
        mask_b=3.8,
        factor_b=1.0,
        factor_f=1.4)

    # This detection (after blurring the original images) finds out what is the central object and its (estimated) size
    obj_cat_ori, segmap, bg_rms = kz.detection.makeCatalog(
        [data],
        lvl=8,
        method='wavelet',
        convolve=False,
        # conv_radius=2,
        wavelet_lvl=5,
        low_freq_lvl=3,
        high_freq_lvl=0,
        match_gaia=False,
        show_fig=True,
        visual_gaia=False,
        b=128,
        f=3,
        pixel_scale=0.168,
        minarea=20,
        deblend_nthresh=30,
        deblend_cont=0.01,
        sky_subtract=True)

    # The detection closest to the requested coordinate is the target.
    catalog_c = SkyCoord(obj_cat_ori['ra'], obj_cat_ori['dec'], unit='deg')
    dist = lsbg_coord.separation(catalog_c)
    cen_indx = obj_cat_ori[np.argsort(dist)[0]]['index']
    cen_obj = obj_cat_ori[cen_indx]
    # print(f'# Central object is #{cen_indx}.')

    # Better position for cen_obj: refine with a windowed centroid.
    x, y, _ = sep.winpos(data.images.mean(
        axis=0), cen_obj['x'], cen_obj['y'], 6)
    ra, dec = data.wcs.wcs_pix2world(x, y, 0)
    cen_obj['x'] = x
    cen_obj['y'] = y
    cen_obj['ra'] = ra
    cen_obj['dec'] = dec

    # This step masks out high freq sources after wavelet transformation
    obj_cat, segmap, bg_rms = kz.detection.makeCatalog([data],
                                                       mask=msk_star,
                                                       lvl=2.5,
                                                       method='wavelet',
                                                       high_freq_lvl=1,
                                                       wavelet_lvl=3,
                                                       match_gaia=False,
                                                       show_fig=True,
                                                       visual_gaia=False,
                                                       b=32,
                                                       f=3,
                                                       pixel_scale=0.168,
                                                       minarea=5,
                                                       deblend_nthresh=30,
                                                       deblend_cont=0.001,
                                                       sky_subtract=True)
    catalog_c = SkyCoord(obj_cat['ra'], obj_cat['dec'], unit='deg')
    dist = lsbg_coord.separation(catalog_c)
    for ind in np.where(dist < (compact_away_factor * cen_obj['fwhm_custom'] * HSC_pixel_scale) * u.arcsec)[0]:
        # we do not mask compact sources that are nearby to the center of target galaxy
        segmap[segmap == ind + 1] = 0

    # Grow the segmentation map into a smooth boolean mask.
    smooth_radius = 2
    gaussian_threshold = 0.03
    mask_conv = np.copy(segmap)
    mask_conv[mask_conv > 0] = 1
    mask_conv = convolve(mask_conv.astype(
        float), Gaussian2DKernel(smooth_radius))
    # This `seg_mask` only masks compact sources
    seg_mask = (mask_conv >= gaussian_threshold)

    # This step masks out bright and large contamination, which is not well-masked in previous step
    obj_cat, segmap, bg_rms = kz.detection.makeCatalog(
        [data],
        lvl=10,  # relative agressive threshold
        method='vanilla',
        match_gaia=False,
        show_fig=True,
        visual_gaia=False,
        b=40,
        f=3,
        pixel_scale=0.168,
        minarea=20,  # only want large things
        deblend_nthresh=30,
        deblend_cont=0.001,
        sky_subtract=True)
    catalog_c = SkyCoord(obj_cat['ra'], obj_cat['dec'], unit='deg')
    dist = lsbg_coord.separation(catalog_c)

    # Drop detections falling inside the protective ellipse around the target.
    arr = np.zeros_like(segmap).astype('uint8')
    sep.mask_ellipse(
        arr,
        cen_obj['x'],
        cen_obj['y'],
        cen_obj['a'],
        cen_obj['b'],
        cen_obj['theta'],
        r=large_away_factor)  # don't mask the target galaxy too much
    for ind, obj in enumerate(obj_cat):
        if arr[int(obj['y']), int(obj['x'])] == 1:
            segmap[segmap == ind + 1] = 0

    smooth_radius = 4
    gaussian_threshold = 0.01
    mask_conv = np.copy(segmap)
    mask_conv[mask_conv > 0] = 1
    mask_conv = convolve(mask_conv.astype(
        float), Gaussian2DKernel(smooth_radius))
    # This `seg_mask_large` masks large bright sources
    seg_mask_large = (mask_conv >= gaussian_threshold)

    # Set weights of masked pixels to zero
    for layer in data.weights:
        layer[msk_star.astype(bool)] = 0
        layer[seg_mask.astype(bool)] = 0
        layer[seg_mask_large.astype(bool)] = 0

    # Construct `scarlet` frames and observation
    from functools import partial
    model_psf = scarlet.GaussianPSF(sigma=(0.8,) * len(filters))
    model_frame = scarlet.Frame(
        data.images.shape,
        wcs=w,
        psfs=model_psf,
        channels=filters)
    observation = scarlet.Observation(
        data.images,
        wcs=data.wcs,
        psfs=data.psfs,
        weights=data.weights,
        channels=filters)
    observation = observation.match(model_frame)

    # Add sources
    from scarlet.initialization import build_initialization_coadd
    coadd, bg_cutoff = build_initialization_coadd(observation)
    coadd[(seg_mask_large + seg_mask + msk_star.astype(bool))] = 0.0
    sources = []
    src = obj_cat_ori[cen_indx]
    # Magnitude cut: faint targets get a single component, brighter ones two.
    if HSC_zeropoint - 2.5 * np.log10(src['flux']) > 26.5:
        # If too faint, single component
        new_source = scarlet.source.SingleExtendedSource(model_frame, (src['ra'], src['dec']),
                                                         observation,
                                                         thresh=0.0,
                                                         shifting=False,
                                                         coadd=coadd,
                                                         coadd_rms=bg_cutoff)
    else:
        new_source = scarlet.source.MultiExtendedSource(model_frame, (src['ra'], src['dec']),
                                                        observation,
                                                        K=2,  # Two components
                                                        thresh=0.01,
                                                        shifting=False)
    sources.append(new_source)

    # Visualize our data and mask and source
    if not os.path.isdir('./Figures'):
        os.mkdir('./Figures/')
    fig = kz.display.display_scarlet_sources(
        data,
        sources,
        show_ind=None,
        stretch=1,
        Q=1,
        minimum=-0.3,
        show_mark=True,
        scale_bar_length=10,
        add_text=f'{prefix}-{index}')
    plt.savefig(f'./Figures/{prefix}-{index:04d}-img.png', bbox_inches='tight')

    # Star fitting!
    start = time.time()
    blend = scarlet.Blend(sources, observation)
    try:
        blend.fit(150, 1e-4)
        with open(f'./Models/{prefix}-{index:04d}-trained-model.pkl', 'wb') as fp:
            pickle.dump([blend, {'e_rel': 1e-4}], fp)
            fp.close()
        last_loss = blend.loss[-1]
        print(
            f'Succeed for e_rel = 1e-4 with {len(blend.loss)} iterations! Try higher accuracy!')
        # Refit at tighter e_rel values; keep the model only when the
        # recent loss is an improvement (or close enough to the minimum).
        for i, e_rel in enumerate([5e-4, 1e-5, 5e-5, 1e-6]):
            blend.fit(150, e_rel)
            if len(blend.loss) > 50:  # must have more than 50 iterations
                recent_loss = np.mean(blend.loss[-10:])
                min_loss = np.min(blend.loss[:-10])
                if recent_loss < min_loss:
                    print(
                        f'Succeed for e_rel = {e_rel} with {len(blend.loss)} iterations! Try higher accuracy!')
                    with open(f'./Models/{prefix}-{index:04d}-trained-model.pkl', 'wb') as fp:
                        pickle.dump([blend, {'e_rel': e_rel}], fp)
                        fp.close()
                elif abs((recent_loss - min_loss) / min_loss) < 0.02:
                    if recent_loss < last_loss:  # better than the saved model
                        print(
                            f'I am okay with relative loss difference = {abs((recent_loss - min_loss) / min_loss)}. Fitting stopped.')
                        with open(f'./Models/{prefix}-{index:04d}-trained-model.pkl', 'wb') as fp:
                            pickle.dump([blend, {'e_rel': e_rel}], fp)
                            fp.close()
                        break
                else:
                    print(
                        f'Cannot achieve a global optimization with e_rel = {e_rel}.')
        # NOTE(review): format argument 0 (e_rel) is never used by this
        # string -- indices start at {1}; confirm the intended message.
        print("Scarlet ran for {1} iterations to logL = {2}".format(
            e_rel, len(blend.loss), -blend.loss[-1]))
        end = time.time()
        print(f'Elapsed time for fitting: {end - start} s')

        # Reload the best saved model for display.
        with open(f"./Models/{prefix}-{index:04d}-trained-model.pkl", "rb") as fp:
            blend = pickle.load(fp)[0]
            fp.close()
        fig = kz.display.display_scarlet_model(
            blend,
            minimum=-0.3,
            stretch=1,
            channels='griz',
            show_loss=True,
            show_mask=True,
            show_mark=False,
            scale_bar=False)
        plt.savefig(
            f'./Figures/{prefix}-{index:04d}-fitting.png', bbox_inches='tight')
        return blend
    except Exception as e:
        # Best-effort: report and hand back the (possibly unconverged) blend.
        print(e)
        return blend
def fitting_less_comp(lsbg, hsc_dr, cutout_halfsize=1.0, prefix='LSBG', large_away_factor=3.0, compact_away_factor=0.4):
    """Fit a `scarlet` model (central galaxy + nearby compact sources) to one LSBG.

    Pipeline visible in this body: download (or load cached) HSC cutouts and
    PSFs, build masks (GAIA bright stars, compact high-frequency sources,
    large bright contaminants), initialize scarlet sources, run ``blend.fit``
    at a sequence of ``e_rel`` accuracies while checkpointing the model to
    ``./Models/``, then pick which fitted components belong to the target
    galaxy by color similarity and position.  Figures go to ``./Figures/``.

    Parameters
    ----------
    lsbg : table row providing 'Seq', 'RAJ2000', 'DEJ2000' for the target.
    hsc_dr : HSC data-release archive handle passed to `hsc_cutout`/`hsc_psf`.
    cutout_halfsize : cutout half-size in arcmin.
    prefix : stem used in every output file name.
    large_away_factor : ellipse scale (in units of a, b) inside which large
        detections are NOT masked, to avoid masking the target itself.
    compact_away_factor : NOTE(review) — not referenced anywhere in this
        body; confirm whether it is dead or used by a caller convention.

    Returns
    -------
    blend : `scarlet.Blend`
        The fitted blend; on any exception the partially fitted blend is
        returned after printing the error.
    """
    clear_output()
    from kuaizi.utils import padding_PSF
    kz.utils.set_env(project='HSC', name='HSC_LSBG')
    # kz.utils.set_matplotlib(usetex=False, fontsize=15)
    index = lsbg['Seq']
    lsbg_coord = SkyCoord(ra=lsbg['RAJ2000'], dec=lsbg['DEJ2000'], unit='deg')
    if not os.path.isdir('./Images'):
        os.mkdir('./Images')
    if not os.path.isdir('./PSFs'):
        os.mkdir('./PSFs')
    size_ang = cutout_halfsize * u.arcmin
    channels = 'griz'
    # Multi-band image cutouts around the target (cached on disk when available).
    cutout = hsc_cutout(
        lsbg_coord,
        cutout_size=size_ang,
        filters=channels,
        mask=True,
        variance=True,
        archive=hsc_dr,
        use_saved=True,
        output_dir='./Images/',
        prefix=f'{prefix}_{index:04d}_img',
        save_output=True)
    # Coadd PSF models at the same position, one per band.
    psf_list = hsc_psf(
        lsbg_coord,
        centered=True,
        filters=channels,
        img_type='coadd',
        verbose=True,
        archive=hsc_dr,
        save_output=True,
        use_saved=True,
        prefix=f'{prefix}_{index:04d}_psf',
        output_dir='./PSFs/')
    channels_list = list(channels)
    # Reconstructure data
    images = np.array([hdu[1].data for hdu in cutout])
    w = wcs.WCS(cutout[0][1].header)  # note: all bands share the same WCS here
    filters = channels_list
    # Inverse-variance weights from the variance HDU of each cutout.
    weights = 1 / np.array([hdu[3].data for hdu in cutout])
    psf_pad = padding_PSF(psf_list)  # Padding PSF cutouts from HSC
    psfs = scarlet.ImagePSF(np.array(psf_pad))
    data = Data(images=images, weights=weights,
                wcs=w, psfs=psfs, channels=channels)
    _, msk_star = kz.utils.gaia_star_mask(  # Generate a mask for GAIA bright stars
        data.images.mean(axis=0),  # averaged image
        w,
        pixel_scale=HSC_pixel_scale,
        gaia_bright=19.5,
        mask_a=694.7,
        mask_b=3.8,
        factor_b=1.0,
        factor_f=0.6)
    # This vanilla detection with very low sigma finds out where is the central object and its footprint
    obj_cat_ori, segmap_ori, bg_rms = kz.detection.makeCatalog(
        [data],
        mask=msk_star,
        lvl=1.2,
        method='vanilla',
        convolve=False,
        match_gaia=False,
        show_fig=True,
        visual_gaia=False,
        b=128,
        f=3,
        pixel_scale=0.168,
        minarea=20,
        deblend_nthresh=30,
        deblend_cont=0.08,
        sky_subtract=True)
    catalog_c = SkyCoord(obj_cat_ori['ra'], obj_cat_ori['dec'], unit='deg')
    dist = lsbg_coord.separation(catalog_c)
    # The detection closest to the requested coordinate is taken as the target.
    cen_indx = obj_cat_ori[np.argsort(dist)[0]]['index']
    cen_obj = obj_cat_ori[cen_indx]
    # print(f'# Central object is #{cen_indx}.')
    # Better position for cen_obj
    x, y, _ = sep.winpos(data.images.mean(
        axis=0), cen_obj['x'], cen_obj['y'], 6)
    ra, dec = data.wcs.wcs_pix2world(x, y, 0)
    cen_obj['x'] = x
    cen_obj['y'] = y
    cen_obj['ra'] = ra
    cen_obj['dec'] = dec
    # This detection (after blurring the original images) finds out what is the central object and its (estimated) size
    obj_cat, segmap_conv, bg_rms = kz.detection.makeCatalog(
        [data],
        lvl=8,
        method='wavelet',
        convolve=False,
        wavelet_lvl=5,
        low_freq_lvl=3,
        high_freq_lvl=0,
        match_gaia=False,
        show_fig=True,
        visual_gaia=False,
        b=128,
        f=3,
        pixel_scale=0.168,
        minarea=20,
        deblend_nthresh=30,
        deblend_cont=0.01,
        sky_subtract=True)
    catalog_c = SkyCoord(obj_cat['ra'], obj_cat['dec'], unit='deg')
    dist = lsbg_coord.separation(catalog_c)
    # NOTE(review): `dist` comes from the wavelet `obj_cat`, but the row is
    # taken from `obj_cat_ori` — looks like a copy-paste slip; confirm whether
    # this should read `obj_cat[...]`.
    cen_indx_conv = obj_cat_ori[np.argsort(dist)[0]]['index']
    # This step masks out HIGH FREQUENCY sources after wavelet transformation
    obj_cat, segmap, bg_rms = kz.detection.makeCatalog([data],
                                                       mask=msk_star,
                                                       lvl=2.5,
                                                       method='wavelet',
                                                       high_freq_lvl=3,
                                                       wavelet_lvl=4,
                                                       match_gaia=False,
                                                       show_fig=True,
                                                       visual_gaia=False,
                                                       b=32,
                                                       f=3,
                                                       pixel_scale=HSC_pixel_scale,
                                                       minarea=5,
                                                       deblend_nthresh=30,
                                                       deblend_cont=0.05,
                                                       sky_subtract=True)
    # the footprint of central object: an ellipse with 4 * a and 4 * b
    footprint = np.zeros_like(segmap, dtype=bool)
    sep.mask_ellipse(footprint, cen_obj['x'], cen_obj['y'],
                     cen_obj['a'], cen_obj['b'], cen_obj['theta'], r=4.0)
    inside_flag = [footprint[item] for item in list(
        zip(obj_cat['y'].astype(int), obj_cat['x'].astype(int)))]
    for ind in np.where(inside_flag)[0]:
        # we do not mask compact sources that are nearby to the center of target galaxy
        segmap[segmap == ind + 1] = 0
    obj_cat_cpct = obj_cat[inside_flag]  # catalog of compact sources
    # Grow the remaining segments with a Gaussian kernel so the mask extends
    # slightly beyond each detection before thresholding.
    smooth_radius = 2
    gaussian_threshold = 0.03
    mask_conv = np.copy(segmap)
    mask_conv[mask_conv > 0] = 1
    mask_conv = convolve(mask_conv.astype(
        float), Gaussian2DKernel(smooth_radius))
    # This `seg_mask` only masks compact sources
    seg_mask = (mask_conv >= gaussian_threshold)
    # This step masks out bright and large contamination, which is not well-masked in previous step
    obj_cat, segmap, bg_rms = kz.detection.makeCatalog(
        [data],
        lvl=10,  # relative agressive threshold
        method='vanilla',
        match_gaia=False,
        show_fig=True,
        visual_gaia=False,
        b=40,
        f=3,
        pixel_scale=0.168,
        minarea=20,  # only want large things
        deblend_nthresh=30,
        deblend_cont=0.001,
        sky_subtract=True)
    catalog_c = SkyCoord(obj_cat['ra'], obj_cat['dec'], unit='deg')
    dist = lsbg_coord.separation(catalog_c)
    arr = np.zeros_like(segmap).astype('uint8')
    sep.mask_ellipse(
        arr,
        cen_obj['x'],
        cen_obj['y'],
        cen_obj['a'],
        cen_obj['b'],
        cen_obj['theta'],
        r=large_away_factor)  # don't mask the target galaxy too much
    for ind, obj in enumerate(obj_cat):
        if arr[int(obj['y']), int(obj['x'])] == 1:
            segmap[segmap == ind + 1] = 0
    smooth_radius = 5
    gaussian_threshold = 0.01
    mask_conv = np.copy(segmap)
    mask_conv[mask_conv > 0] = 1
    mask_conv = convolve(mask_conv.astype(
        float), Gaussian2DKernel(smooth_radius))
    # This `seg_mask_large` masks large bright sources
    seg_mask_large = (mask_conv >= gaussian_threshold)
    # Set weights of masked pixels to zero
    for layer in data.weights:
        layer[msk_star.astype(bool)] = 0
        layer[seg_mask.astype(bool)] = 0
        layer[seg_mask_large.astype(bool)] = 0
    # Remove objects that are masked from the compact obj catalog
    catalog_c = SkyCoord(obj_cat_cpct['ra'], obj_cat_cpct['dec'], unit='deg')
    dist = lsbg_coord.separation(catalog_c)
    # Detections within 2" of the target are the target itself, not contaminants.
    obj_cat_cpct.remove_rows(np.where(dist < 2 * u.arcsec)[0])
    inside_flag = [
        seg_mask_large[item] for item in list(
            zip(obj_cat_cpct['y'].astype(int), obj_cat_cpct['x'].astype(int)))
    ]
    obj_cat_cpct.remove_rows(np.where(inside_flag)[0])
    # Construct `scarlet` frames and observation
    from functools import partial  # NOTE(review): `partial` is unused in this body
    model_psf = scarlet.GaussianPSF(sigma=(0.8,) * len(filters))
    model_frame = scarlet.Frame(
        data.images.shape,
        wcs=w,
        psfs=model_psf,
        channels=filters)
    observation = scarlet.Observation(
        data.images,
        wcs=data.wcs,
        psfs=data.psfs,
        weights=data.weights,
        channels=filters)
    observation = observation.match(model_frame)
    # Add sources
    from scarlet.initialization import build_initialization_coadd
    # Filtered coadd removes noise! Very useful for faint objects (but slow)
    coadd, bg_cutoff = build_initialization_coadd(
        observation, filtered_coadd=True)
    # Zero out all masked pixels in the initialization coadd.
    coadd[(seg_mask_large + seg_mask + msk_star.astype(bool))] = 0.0
    sources = []
    src = obj_cat_ori[cen_indx]
    if HSC_zeropoint - 2.5 * np.log10(src['flux']) > 26.:
        # If too faint, single component
        new_source = scarlet.source.SingleExtendedSource(model_frame, (src['ra'], src['dec']),
                                                         observation,
                                                         thresh=0.001,
                                                         shifting=False,
                                                         coadd=coadd,
                                                         coadd_rms=bg_cutoff)
    else:
        new_source = scarlet.source.MultiExtendedSource(model_frame, (src['ra'], src['dec']),
                                                        observation,
                                                        K=2,  # Two components
                                                        thresh=0.01,
                                                        shifting=False,
                                                        coadd=coadd,
                                                        coadd_rms=bg_cutoff)
    sources.append(new_source)
    for k, src in enumerate(obj_cat_cpct):  # compact sources
        if src['fwhm_custom'] < 5:  # src['b'] / src['a'] > 0.9 and
            new_source = scarlet.source.PointSource(
                model_frame, (src['ra'], src['dec']), observation)
        else:
            try:
                new_source = scarlet.source.SingleExtendedSource(
                    model_frame, (src['ra'], src['dec']), observation, coadd=coadd, coadd_rms=bg_cutoff)
            # NOTE(review): bare `except` silently falls back to the default
            # initialization — consider catching a narrower exception type.
            except:
                new_source = scarlet.source.SingleExtendedSource(
                    model_frame, (src['ra'], src['dec']), observation)
        sources.append(new_source)
    # Visualize our data and mask and source
    if not os.path.isdir('./Figures'):
        os.mkdir('./Figures/')
    fig = kz.display.display_scarlet_sources(
        data,
        sources,
        show_ind=None,
        stretch=1,
        Q=1,
        minimum=-0.3,
        show_mark=True,
        scale_bar_length=10,
        add_text=f'{prefix}-{index}')
    plt.savefig(
        f'./Figures/{prefix}-{index:04d}-img-less.png', bbox_inches='tight')
    # Star fitting!
    start = time.time()
    blend = scarlet.Blend(sources, observation)
    fig = kz.display.display_scarlet_model(
        blend,
        zoomin_size=50,
        minimum=-0.3,
        stretch=1,
        channels='griz',
        show_loss=True,
        show_mask=True,
        show_mark=True,
        scale_bar=False)
    plt.savefig(
        f'./Figures/{prefix}-{index:04d}-init-less.png', bbox_inches='tight')
    try:
        blend.fit(150, 1e-4)
        with open(f'./Models/{prefix}-{index:04d}-trained-model-less.pkl', 'wb') as fp:
            pickle.dump([blend, {'e_rel': 1e-4, 'loss': blend.loss[-1]}], fp)
            fp.close()  # NOTE(review): redundant — the `with` block already closes fp
        last_loss = blend.loss[-1]
        print(
            f'Succeed for e_rel = 1e-4 with {len(blend.loss)} iterations! Try higher accuracy!')
        # Refit at a ladder of relative-error tolerances, checkpointing
        # whenever the fit improves on the previously saved model.
        for i, e_rel in enumerate([5e-4, 1e-5, 5e-5, 1e-6]):
            blend.fit(150, e_rel)
            if len(blend.loss) > 50:  # must have more than 50 iterations
                recent_loss = np.mean(blend.loss[-10:])
                min_loss = np.min(blend.loss[:-10])
                if recent_loss < min_loss:
                    print(
                        f'Succeed for e_rel = {e_rel} with {len(blend.loss)} iterations! Try higher accuracy!')
                    with open(f'./Models/{prefix}-{index:04d}-trained-model-less.pkl', 'wb') as fp:
                        pickle.dump(
                            [blend, {'e_rel': e_rel, 'loss': blend.loss[-1]}], fp)
                        fp.close()
                elif abs((recent_loss - min_loss) / min_loss) < 0.02:
                    if recent_loss < last_loss:  # better than the saved model
                        print(
                            f'I am okay with relative loss difference = {abs((recent_loss - min_loss) / min_loss)}. Fitting stopped.')
                        with open(f'./Models/{prefix}-{index:04d}-trained-model-less.pkl', 'wb') as fp:
                            pickle.dump(
                                [blend, {'e_rel': e_rel, 'loss': blend.loss[-1]}], fp)
                            fp.close()
                        break
                else:
                    print(
                        f'Cannot achieve a global optimization with e_rel = {e_rel}.')
        # NOTE(review): positional argument 0 (`e_rel`) is never referenced by
        # this template — only {1} and {2} are used.
        print("Scarlet ran for {1} iterations to logL = {2}".format(
            e_rel, len(blend.loss), -blend.loss[-1]))
        end = time.time()
        print(f'Elapsed time for fitting: {end - start} s')
        # with open(f"./Models/{prefix}-{index:04d}-trained-model.pkl", "rb") as fp:
        #     blend = pickle.load(fp)[0]
        #     fp.close()
        # Find out what compose a galaxy
        if len(blend.sources) > 1:
            # Magnitudes of every source in each band (zeropoint 27).
            mag_mat = np.array(
                [-2.5 * np.log10(kz.measure.flux(src)) + 27 for src in sources])
            # g - r, g - i, g - z
            color_mat = (- mag_mat + mag_mat[:, 0][:, np.newaxis])[:, 1:]
            # Relative color distance of every source from source 0 (the center).
            color_dist = np.linalg.norm(
                color_mat - color_mat[0], axis=1) / np.linalg.norm(color_mat[0])
            # np.argsort(color_dist)[:] #
            sed_ind = np.where(color_dist < 0.2)[0]
            dist = np.array([
                np.linalg.norm(
                    src.center - blend.sources[0].center) * HSC_pixel_scale
                for src in np.array(blend.sources)[sed_ind]
            ])
            # NOTE(review): `dist_flag` is computed but not applied to
            # `sed_ind` below — confirm whether that filter was dropped
            # intentionally.
            dist_flag = (
                dist < 3 * np.sqrt(cen_obj['a'] * cen_obj['b']) * HSC_pixel_scale)
            point_flag = np.array([
                isinstance(src, scarlet.source.PointSource)
                for src in np.array(blend.sources)[sed_ind]
            ])
            near_cen_flag = [
                (segmap_conv == cen_indx_conv +
                 1)[int(src.center[1]), int(src.center[0])]
                for src in np.array(blend.sources)[sed_ind]
            ]
            sed_ind = sed_ind[(~point_flag) & near_cen_flag]
            if not 0 in sed_ind:
                # the central source must be included.
                sed_ind = np.array(list(set(sed_ind).union({0})))
        else:
            sed_ind = np.array([0])
        print(f'Components {sed_ind} are considered as the target galaxy.')
        with open(f'./Models/{prefix}-{index:04d}-trained-model-less.pkl', 'wb') as fp:
            pickle.dump(
                [blend, {'e_rel': e_rel, 'loss': blend.loss[-1]}, sed_ind], fp)
            fp.close()
        fig = kz.display.display_scarlet_model(
            blend,
            minimum=-0.3,
            stretch=1,
            channels='griz',
            show_loss=True,
            show_mask=True,
            show_mark=False,
            scale_bar=False)
        plt.savefig(
            f'./Figures/{prefix}-{index:04d}-fitting-less.png', bbox_inches='tight')
        fig = kz.display.display_scarlet_model(
            blend,
            show_ind=sed_ind,
            zoomin_size=50,
            minimum=-0.3,
            stretch=1,
            channels='griz',
            show_loss=True,
            show_mask=True,
            show_mark=False,
            scale_bar=False)
        plt.savefig(
            f'./Figures/{prefix}-{index:04d}-zoomin-less.png', bbox_inches='tight')
        return blend
    except Exception as e:
        # Broad catch: report the failure and hand back the (partially
        # fitted) blend instead of raising.
        print(e)
        return blend
def fitting_single_comp(lsbg, hsc_dr, cutout_halfsize=1.0, prefix='LSBG', large_away_factor=3.0, compact_away_factor=0.4):
    """Fit a `scarlet` model with ONLY the central source (no compact sources) to one LSBG.

    Same masking/detection pipeline as `fitting_less_comp`, but the compact
    neighbor sources are NOT added to the model (that loop is commented out
    below); only the central galaxy is fitted.  Models are saved to
    ``./Models/*-trained-model-sing.pkl`` and figures to ``./Figures/*-sing.png``.

    Parameters
    ----------
    lsbg : table row providing 'Seq', 'RAJ2000', 'DEJ2000' for the target.
    hsc_dr : HSC data-release archive handle passed to `hsc_cutout`/`hsc_psf`.
    cutout_halfsize : cutout half-size in arcmin.
    prefix : stem used in every output file name.
    large_away_factor : ellipse scale inside which large detections are not masked.
    compact_away_factor : NOTE(review) — not referenced in this body.

    Returns
    -------
    blend : `scarlet.Blend`
        The fitted blend; on any exception the partially fitted blend is
        returned after printing the error.
    """
    clear_output()
    from kuaizi.utils import padding_PSF
    kz.utils.set_env(project='HSC', name='HSC_LSBG')
    # kz.utils.set_matplotlib(usetex=False, fontsize=15)
    index = lsbg['Seq']
    lsbg_coord = SkyCoord(ra=lsbg['RAJ2000'], dec=lsbg['DEJ2000'], unit='deg')
    if not os.path.isdir('./Images'):
        os.mkdir('./Images')
    if not os.path.isdir('./PSFs'):
        os.mkdir('./PSFs')
    size_ang = cutout_halfsize * u.arcmin
    channels = 'griz'
    # Multi-band image cutouts around the target (cached on disk when available).
    cutout = hsc_cutout(
        lsbg_coord,
        cutout_size=size_ang,
        filters=channels,
        mask=True,
        variance=True,
        archive=hsc_dr,
        use_saved=True,
        output_dir='./Images/',
        prefix=f'{prefix}_{index:04d}_img',
        save_output=True)
    # Coadd PSF models at the same position, one per band.
    psf_list = hsc_psf(
        lsbg_coord,
        centered=True,
        filters=channels,
        img_type='coadd',
        verbose=True,
        archive=hsc_dr,
        save_output=True,
        use_saved=True,
        prefix=f'{prefix}_{index:04d}_psf',
        output_dir='./PSFs/')
    channels_list = list(channels)
    # Reconstructure data
    images = np.array([hdu[1].data for hdu in cutout])
    w = wcs.WCS(cutout[0][1].header)  # note: all bands share the same WCS here
    filters = channels_list
    # Inverse-variance weights from the variance HDU of each cutout.
    weights = 1 / np.array([hdu[3].data for hdu in cutout])
    psf_pad = padding_PSF(psf_list)  # Padding PSF cutouts from HSC
    psfs = scarlet.ImagePSF(np.array(psf_pad))
    data = Data(images=images, weights=weights,
                wcs=w, psfs=psfs, channels=channels)
    _, msk_star = kz.utils.gaia_star_mask(  # Generate a mask for GAIA bright stars
        data.images.mean(axis=0),  # averaged image
        w,
        pixel_scale=HSC_pixel_scale,
        gaia_bright=19.5,
        mask_a=694.7,
        mask_b=3.8,
        factor_b=1.0,
        factor_f=0.6)
    # This vanilla detection with very low sigma finds out where is the central object and its footprint
    # NOTE(review): unlike `fitting_less_comp`, no `mask=msk_star` is passed
    # here — confirm whether that difference is intentional.
    obj_cat_ori, segmap_ori, bg_rms = kz.detection.makeCatalog(
        [data],
        lvl=1.2,
        method='vanilla',
        convolve=False,
        match_gaia=False,
        show_fig=True,
        visual_gaia=False,
        b=128,
        f=3,
        pixel_scale=0.168,
        minarea=20,
        deblend_nthresh=30,
        deblend_cont=0.08,
        sky_subtract=True)
    catalog_c = SkyCoord(obj_cat_ori['ra'], obj_cat_ori['dec'], unit='deg')
    dist = lsbg_coord.separation(catalog_c)
    # The detection closest to the requested coordinate is taken as the target.
    cen_indx = obj_cat_ori[np.argsort(dist)[0]]['index']
    cen_obj = obj_cat_ori[cen_indx]
    # print(f'# Central object is #{cen_indx}.')
    # Better position for cen_obj
    x, y, _ = sep.winpos(data.images.mean(
        axis=0), cen_obj['x'], cen_obj['y'], 6)
    ra, dec = data.wcs.wcs_pix2world(x, y, 0)
    cen_obj['x'] = x
    cen_obj['y'] = y
    cen_obj['ra'] = ra
    cen_obj['dec'] = dec
    # This detection (after blurring the original images) finds out what is the central object and its (estimated) size
    obj_cat, segmap_conv, bg_rms = kz.detection.makeCatalog(
        [data],
        lvl=8,
        method='wavelet',
        convolve=False,
        wavelet_lvl=5,
        low_freq_lvl=3,
        high_freq_lvl=0,
        match_gaia=False,
        show_fig=True,
        visual_gaia=False,
        b=128,
        f=3,
        pixel_scale=0.168,
        minarea=20,
        deblend_nthresh=30,
        deblend_cont=0.01,
        sky_subtract=True)
    catalog_c = SkyCoord(obj_cat['ra'], obj_cat['dec'], unit='deg')
    dist = lsbg_coord.separation(catalog_c)
    # NOTE(review): `dist` comes from the wavelet `obj_cat`, but the row is
    # taken from `obj_cat_ori` — looks like a copy-paste slip; confirm whether
    # this should read `obj_cat[...]`.
    cen_indx_conv = obj_cat_ori[np.argsort(dist)[0]]['index']
    # This step masks out HIGH FREQUENCY sources after wavelet transformation
    obj_cat, segmap, bg_rms = kz.detection.makeCatalog([data],
                                                       mask=msk_star,
                                                       lvl=2.5,
                                                       method='wavelet',
                                                       high_freq_lvl=3,
                                                       wavelet_lvl=4,
                                                       match_gaia=False,
                                                       show_fig=True,
                                                       visual_gaia=False,
                                                       b=32,
                                                       f=3,
                                                       pixel_scale=HSC_pixel_scale,
                                                       minarea=5,
                                                       deblend_nthresh=30,
                                                       deblend_cont=0.05,
                                                       sky_subtract=True)
    # the footprint of central object: an ellipse with 4 * a and 4 * b
    footprint = np.zeros_like(segmap, dtype=bool)
    sep.mask_ellipse(footprint, cen_obj['x'], cen_obj['y'],
                     cen_obj['a'], cen_obj['b'], cen_obj['theta'], r=4.0)
    inside_flag = [footprint[item] for item in list(
        zip(obj_cat['y'].astype(int), obj_cat['x'].astype(int)))]
    for ind in np.where(inside_flag)[0]:
        # we do not mask compact sources that are nearby to the center of target galaxy
        segmap[segmap == ind + 1] = 0
    obj_cat_cpct = obj_cat[inside_flag]  # catalog of compact sources
    # Grow the remaining segments with a Gaussian kernel before thresholding.
    smooth_radius = 2
    gaussian_threshold = 0.03
    mask_conv = np.copy(segmap)
    mask_conv[mask_conv > 0] = 1
    mask_conv = convolve(mask_conv.astype(
        float), Gaussian2DKernel(smooth_radius))
    # This `seg_mask` only masks compact sources
    seg_mask = (mask_conv >= gaussian_threshold)
    # This step masks out bright and large contamination, which is not well-masked in previous step
    obj_cat, segmap, bg_rms = kz.detection.makeCatalog(
        [data],
        lvl=10,  # relative agressive threshold
        method='vanilla',
        match_gaia=False,
        show_fig=True,
        visual_gaia=False,
        b=40,
        f=3,
        pixel_scale=0.168,
        minarea=20,  # only want large things
        deblend_nthresh=30,
        deblend_cont=0.001,
        sky_subtract=True)
    catalog_c = SkyCoord(obj_cat['ra'], obj_cat['dec'], unit='deg')
    dist = lsbg_coord.separation(catalog_c)
    arr = np.zeros_like(segmap).astype('uint8')
    sep.mask_ellipse(
        arr,
        cen_obj['x'],
        cen_obj['y'],
        cen_obj['a'],
        cen_obj['b'],
        cen_obj['theta'],
        r=large_away_factor)  # don't mask the target galaxy too much
    for ind, obj in enumerate(obj_cat):
        if arr[int(obj['y']), int(obj['x'])] == 1:
            segmap[segmap == ind + 1] = 0
    smooth_radius = 5
    gaussian_threshold = 0.01
    mask_conv = np.copy(segmap)
    mask_conv[mask_conv > 0] = 1
    mask_conv = convolve(mask_conv.astype(
        float), Gaussian2DKernel(smooth_radius))
    # This `seg_mask_large` masks large bright sources
    seg_mask_large = (mask_conv >= gaussian_threshold)
    # Set weights of masked pixels to zero
    for layer in data.weights:
        layer[msk_star.astype(bool)] = 0
        layer[seg_mask.astype(bool)] = 0
        layer[seg_mask_large.astype(bool)] = 0
    # Remove objects that are masked from the compact obj catalog
    catalog_c = SkyCoord(obj_cat_cpct['ra'], obj_cat_cpct['dec'], unit='deg')
    dist = lsbg_coord.separation(catalog_c)
    # Detections within 2" of the target are the target itself, not contaminants.
    obj_cat_cpct.remove_rows(np.where(dist < 2 * u.arcsec)[0])
    inside_flag = [
        seg_mask_large[item] for item in list(
            zip(obj_cat_cpct['y'].astype(int), obj_cat_cpct['x'].astype(int)))
    ]
    obj_cat_cpct.remove_rows(np.where(inside_flag)[0])
    # Construct `scarlet` frames and observation
    from functools import partial  # NOTE(review): `partial` is unused in this body
    model_psf = scarlet.GaussianPSF(sigma=(0.8,) * len(filters))
    model_frame = scarlet.Frame(
        data.images.shape,
        wcs=w,
        psfs=model_psf,
        channels=filters)
    observation = scarlet.Observation(
        data.images,
        wcs=data.wcs,
        psfs=data.psfs,
        weights=data.weights,
        channels=filters)
    observation = observation.match(model_frame)
    # Add sources
    from scarlet.initialization import build_initialization_coadd
    # Filtered coadd removes noise! Very useful for faint objects (but slow)
    coadd, bg_cutoff = build_initialization_coadd(
        observation, filtered_coadd=True)
    # Zero out all masked pixels in the initialization coadd.
    coadd[(seg_mask_large + seg_mask + msk_star.astype(bool))] = 0.0
    sources = []
    src = obj_cat_ori[cen_indx]
    if HSC_zeropoint - 2.5 * np.log10(src['flux']) > 26.5:
        # If too faint, single component
        new_source = scarlet.source.SingleExtendedSource(model_frame, (src['ra'], src['dec']),
                                                         observation,
                                                         thresh=0.001,
                                                         shifting=False,
                                                         coadd=coadd,
                                                         coadd_rms=bg_cutoff)
    else:
        new_source = scarlet.source.MultiExtendedSource(model_frame, (src['ra'], src['dec']),
                                                        observation,
                                                        K=2,  # Two components
                                                        thresh=0.01,
                                                        shifting=False,
                                                        coadd=coadd,
                                                        coadd_rms=bg_cutoff)
    sources.append(new_source)
    # for k, src in enumerate(obj_cat_cpct): # compact sources
    #     if src['fwhm_custom'] < 5:  # src['b'] / src['a'] > 0.9 and
    #         new_source = scarlet.source.PointSource(
    #             model_frame, (src['ra'], src['dec']), observation)
    #     else:
    #         try:
    #             new_source = scarlet.source.SingleExtendedSource(
    #                 model_frame, (src['ra'], src['dec']), observation, coadd=coadd, coadd_rms=bg_cutoff)
    #         except:
    #             new_source = scarlet.source.SingleExtendedSource(
    #                 model_frame, (src['ra'], src['dec']), observation)
    #     sources.append(new_source)
    # Visualize our data and mask and source
    if not os.path.isdir('./Figures'):
        os.mkdir('./Figures/')
    fig = kz.display.display_scarlet_sources(
        data,
        sources,
        show_ind=None,
        stretch=1,
        Q=1,
        minimum=-0.3,
        show_mark=True,
        scale_bar_length=10,
        add_text=f'{prefix}-{index}')
    plt.savefig(
        f'./Figures/{prefix}-{index:04d}-img-sing.png', bbox_inches='tight')
    # Star fitting!
    start = time.time()
    blend = scarlet.Blend(sources, observation)
    fig = kz.display.display_scarlet_model(
        blend,
        zoomin_size=50,
        minimum=-0.3,
        stretch=1,
        channels='griz',
        show_loss=True,
        show_mask=True,
        show_mark=True,
        scale_bar=False)
    plt.savefig(
        f'./Figures/{prefix}-{index:04d}-init-sing.png', bbox_inches='tight')
    try:
        blend.fit(150, 1e-4)
        with open(f'./Models/{prefix}-{index:04d}-trained-model-sing.pkl', 'wb') as fp:
            pickle.dump([blend, {'e_rel': 1e-4, 'loss': blend.loss[-1]}], fp)
            fp.close()  # NOTE(review): redundant — the `with` block already closes fp
        last_loss = blend.loss[-1]
        print(
            f'Succeed for e_rel = 1e-4 with {len(blend.loss)} iterations! Try higher accuracy!')
        # Refit at a ladder of relative-error tolerances, checkpointing
        # whenever the fit improves on the previously saved model.
        for i, e_rel in enumerate([5e-4, 1e-5, 5e-5, 1e-6]):
            blend.fit(150, e_rel)
            if len(blend.loss) > 50:  # must have more than 50 iterations
                recent_loss = np.mean(blend.loss[-10:])
                min_loss = np.min(blend.loss[:-10])
                if recent_loss < min_loss:
                    print(
                        f'Succeed for e_rel = {e_rel} with {len(blend.loss)} iterations! Try higher accuracy!')
                    with open(f'./Models/{prefix}-{index:04d}-trained-model-sing.pkl', 'wb') as fp:
                        pickle.dump(
                            [blend, {'e_rel': e_rel, 'loss': blend.loss[-1]}], fp)
                        fp.close()
                elif abs((recent_loss - min_loss) / min_loss) < 0.02:
                    if recent_loss < last_loss:  # better than the saved model
                        print(
                            f'I am okay with relative loss difference = {abs((recent_loss - min_loss) / min_loss)}. Fitting stopped.')
                        with open(f'./Models/{prefix}-{index:04d}-trained-model-sing.pkl', 'wb') as fp:
                            pickle.dump(
                                [blend, {'e_rel': e_rel, 'loss': blend.loss[-1]}], fp)
                            fp.close()
                        break
                else:
                    print(
                        f'Cannot achieve a global optimization with e_rel = {e_rel}.')
        # NOTE(review): positional argument 0 (`e_rel`) is never referenced by
        # this template — only {1} and {2} are used.
        print("Scarlet ran for {1} iterations to logL = {2}".format(
            e_rel, len(blend.loss), -blend.loss[-1]))
        end = time.time()
        print(f'Elapsed time for fitting: {end - start} s')
        # with open(f"./Models/{prefix}-{index:04d}-trained-model.pkl", "rb") as fp:
        #     blend = pickle.load(fp)[0]
        #     fp.close()
        # Find out what compose a galaxy
        # seds = np.array([np.copy(src.parameters[0]) for src in blend.sources])
        # corr = np.corrcoef(seds)
        # sed_ind = np.argsort(corr[0, :])[::-1]  # np.where(corr[0, :] > 0.99)[0]#
        # # dist = np.array([
        # #     np.linalg.norm(src.center - blend.sources[0].center) * HSC_pixel_scale
        # #     for src in np.array(blend.sources)[sed_ind]
        # # ])
        # # dist_flag = (dist < 3 * np.sqrt(cen_obj['a'] * cen_obj['b']) * HSC_pixel_scale)
        # point_flag = np.array([isinstance(src, scarlet.source.PointSource) for src in np.array(blend.sources)[sed_ind]])
        # near_cen_flag = [(segmap_conv == cen_indx_conv + 1)[int(src.center[1]), int(src.center[0])] for src in np.array(blend.sources)[sed_ind]]
        # sed_ind = sed_ind[(~point_flag) & near_cen_flag]  # & dist_flag]
        # if not 0 in sed_ind:
        #     sed_ind.append(0)  # the central source must be included.
        # print(f'Components {sed_ind} are considered as the target galaxy.')
        # with open(f'./Models/{prefix}-{index:04d}-trained-model.pkl', 'wb') as fp:
        #     pickle.dump([blend, {'e_rel': e_rel}, sed_ind], fp)
        #     fp.close()
        with open(f'./Models/{prefix}-{index:04d}-trained-model-sing.pkl', 'wb') as fp:
            pickle.dump([blend, {'e_rel': e_rel, 'loss': blend.loss[-1]}], fp)
            fp.close()
        fig = kz.display.display_scarlet_model(
            blend,
            minimum=-0.3,
            stretch=1,
            channels='griz',
            show_loss=True,
            show_mask=True,
            show_mark=False,
            scale_bar=False)
        plt.savefig(
            f'./Figures/{prefix}-{index:04d}-fitting-sing.png', bbox_inches='tight')
        fig = kz.display.display_scarlet_model(
            blend,
            zoomin_size=50,
            minimum=-0.3,
            stretch=1,
            channels='griz',
            show_loss=True,
            show_mask=True,
            show_mark=False,
            scale_bar=False)
        plt.savefig(
            f'./Figures/{prefix}-{index:04d}-zoomin-sing.png', bbox_inches='tight')
        return blend
    except Exception as e:
        # Broad catch: report the failure and hand back the (partially
        # fitted) blend instead of raising.
        print(e)
        return blend
def fitting_single_comp_mockgal(index=0, prefix='MockLSBG', large_away_factor=3.0, compact_away_factor=0.4, zp=HSC_zeropoint):
    """Fit a single-source `scarlet` model to a MOCK galaxy image.

    Same pipeline as `fitting_single_comp`, but the data (images, WCS,
    variances, PSFs) come from a pickled `MockGal` object in
    ``./Models/MockGalModel/`` instead of HSC cutouts, and the photometric
    zeropoint is taken from the `zp` parameter.  Trained models are written
    to ``./Models/MockGalScarlet/`` and figures to ``./Figures/``.

    Parameters
    ----------
    index : running number of the mock galaxy; selects the input pickle.
    prefix : stem used in every input/output file name.
    large_away_factor : ellipse scale inside which large detections are not masked.
    compact_away_factor : NOTE(review) — not referenced in this body.
    zp : photometric zeropoint used in the faintness test for choosing
        a single- vs two-component central source.

    Returns
    -------
    blend : `scarlet.Blend`
        The fitted blend; on any exception the partially fitted blend is
        returned after printing the error.
    """
    clear_output()
    kz.utils.set_env(project='HSC', name='HSC_LSBG')
    index = index  # NOTE(review): self-assignment — no effect
    from kuaizi.mock import MockGal
    mgal = MockGal.read(f'./Models/MockGalModel/{prefix}-{index:04d}.pkl')
    print(f'Opening ./Models/MockGalModel/{prefix}-{index:04d}.pkl')
    channels = mgal.channels
    channels_list = list(channels)
    filters = channels_list
    lsbg_coord = SkyCoord(
        ra=mgal.model.info['ra'], dec=mgal.model.info['dec'], unit='deg')
    # Reconstructure data
    images = mgal.mock.images
    w = mgal.mock.wcs
    # Inverse-variance weights from the mock variance planes.
    weights = 1 / mgal.mock.variances
    psfs = scarlet.ImagePSF(np.array(mgal.mock.psfs))
    data = Data(images=images, weights=weights,
                wcs=w, psfs=psfs, channels=channels)
    _, msk_star = kz.utils.gaia_star_mask(  # Generate a mask for GAIA bright stars
        data.images.mean(axis=0),  # averaged image
        w,
        pixel_scale=HSC_pixel_scale,
        gaia_bright=19.5,
        mask_a=694.7,
        mask_b=3.8,
        factor_b=1.0,
        factor_f=0.6)
    # This vanilla detection with very low sigma finds out where is the central object and its footprint
    obj_cat_ori, segmap_ori, bg_rms = kz.detection.makeCatalog(
        [data],
        mask=msk_star,
        lvl=1.2,
        method='vanilla',
        convolve=False,
        match_gaia=False,
        show_fig=True,
        visual_gaia=False,
        b=128,
        f=3,
        pixel_scale=0.168,
        minarea=20,
        deblend_nthresh=30,
        deblend_cont=0.08,
        sky_subtract=True)
    catalog_c = SkyCoord(obj_cat_ori['ra'], obj_cat_ori['dec'], unit='deg')
    dist = lsbg_coord.separation(catalog_c)
    # The detection closest to the mock galaxy's coordinate is the target.
    cen_indx = obj_cat_ori[np.argsort(dist)[0]]['index']
    cen_obj = obj_cat_ori[cen_indx]
    # print(f'# Central object is #{cen_indx}.')
    # Better position for cen_obj
    x, y, _ = sep.winpos(data.images.mean(
        axis=0), cen_obj['x'], cen_obj['y'], 6)
    ra, dec = data.wcs.wcs_pix2world(x, y, 0)
    cen_obj['x'] = x
    cen_obj['y'] = y
    cen_obj['ra'] = ra
    cen_obj['dec'] = dec
    # This detection (after blurring the original images) finds out what is the central object and its (estimated) size
    obj_cat, segmap_conv, bg_rms = kz.detection.makeCatalog(
        [data],
        mask=msk_star,
        lvl=8,
        method='wavelet',
        convolve=False,
        wavelet_lvl=5,
        low_freq_lvl=3,
        high_freq_lvl=0,
        match_gaia=False,
        show_fig=True,
        visual_gaia=False,
        b=128,
        f=3,
        pixel_scale=0.168,
        minarea=20,
        deblend_nthresh=30,
        deblend_cont=0.01,
        sky_subtract=True)
    catalog_c = SkyCoord(obj_cat['ra'], obj_cat['dec'], unit='deg')
    dist = lsbg_coord.separation(catalog_c)
    # NOTE(review): `dist` comes from the wavelet `obj_cat`, but the row is
    # taken from `obj_cat_ori` — looks like a copy-paste slip; confirm whether
    # this should read `obj_cat[...]`.
    cen_indx_conv = obj_cat_ori[np.argsort(dist)[0]]['index']
    # This step masks out HIGH FREQUENCY sources after wavelet transformation
    obj_cat, segmap, bg_rms = kz.detection.makeCatalog([data],
                                                       mask=msk_star,
                                                       lvl=2.5,
                                                       method='wavelet',
                                                       high_freq_lvl=3,
                                                       wavelet_lvl=4,
                                                       match_gaia=False,
                                                       show_fig=True,
                                                       visual_gaia=False,
                                                       b=32,
                                                       f=3,
                                                       pixel_scale=HSC_pixel_scale,
                                                       minarea=5,
                                                       deblend_nthresh=30,
                                                       deblend_cont=0.05,
                                                       sky_subtract=True)
    # the footprint of central object: an ellipse with 4 * a and 4 * b
    footprint = np.zeros_like(segmap, dtype=bool)
    sep.mask_ellipse(footprint, cen_obj['x'], cen_obj['y'],
                     cen_obj['a'], cen_obj['b'], cen_obj['theta'], r=4.0)
    inside_flag = [footprint[item] for item in list(
        zip(obj_cat['y'].astype(int), obj_cat['x'].astype(int)))]
    for ind in np.where(inside_flag)[0]:
        # we do not mask compact sources that are nearby to the center of target galaxy
        segmap[segmap == ind + 1] = 0
    obj_cat_cpct = obj_cat[inside_flag]  # catalog of compact sources
    # Grow the remaining segments with a Gaussian kernel before thresholding.
    smooth_radius = 2
    gaussian_threshold = 0.03
    mask_conv = np.copy(segmap)
    mask_conv[mask_conv > 0] = 1
    mask_conv = convolve(mask_conv.astype(
        float), Gaussian2DKernel(smooth_radius))
    # This `seg_mask` only masks compact sources
    seg_mask = (mask_conv >= gaussian_threshold)
    # This step masks out bright and large contamination, which is not well-masked in previous step
    obj_cat, segmap, bg_rms = kz.detection.makeCatalog(
        [data],
        lvl=10,  # relative agressive threshold
        method='vanilla',
        match_gaia=False,
        show_fig=True,
        visual_gaia=False,
        b=40,
        f=3,
        pixel_scale=0.168,
        minarea=20,  # only want large things
        deblend_nthresh=30,
        deblend_cont=0.001,
        sky_subtract=True)
    catalog_c = SkyCoord(obj_cat['ra'], obj_cat['dec'], unit='deg')
    dist = lsbg_coord.separation(catalog_c)
    arr = np.zeros_like(segmap).astype('uint8')
    sep.mask_ellipse(
        arr,
        cen_obj['x'],
        cen_obj['y'],
        cen_obj['a'],
        cen_obj['b'],
        cen_obj['theta'],
        r=large_away_factor)  # don't mask the target galaxy too much
    for ind, obj in enumerate(obj_cat):
        if arr[int(obj['y']), int(obj['x'])] == 1:
            segmap[segmap == ind + 1] = 0
    smooth_radius = 5
    gaussian_threshold = 0.01
    mask_conv = np.copy(segmap)
    mask_conv[mask_conv > 0] = 1
    mask_conv = convolve(mask_conv.astype(
        float), Gaussian2DKernel(smooth_radius))
    # This `seg_mask_large` masks large bright sources
    seg_mask_large = (mask_conv >= gaussian_threshold)
    # Set weights of masked pixels to zero
    for layer in data.weights:
        layer[msk_star.astype(bool)] = 0
        layer[seg_mask.astype(bool)] = 0
        layer[seg_mask_large.astype(bool)] = 0
    # Remove objects that are masked from the compact obj catalog
    catalog_c = SkyCoord(obj_cat_cpct['ra'], obj_cat_cpct['dec'], unit='deg')
    dist = lsbg_coord.separation(catalog_c)
    # Detections within 2" of the target are the target itself, not contaminants.
    obj_cat_cpct.remove_rows(np.where(dist < 2 * u.arcsec)[0])
    inside_flag = [
        seg_mask_large[item] for item in list(
            zip(obj_cat_cpct['y'].astype(int), obj_cat_cpct['x'].astype(int)))
    ]
    obj_cat_cpct.remove_rows(np.where(inside_flag)[0])
    # Construct `scarlet` frames and observation
    from functools import partial  # NOTE(review): `partial` is unused in this body
    model_psf = scarlet.GaussianPSF(sigma=(0.8,) * len(filters))
    model_frame = scarlet.Frame(
        data.images.shape,
        wcs=w,
        psfs=model_psf,
        channels=filters)
    observation = scarlet.Observation(
        data.images,
        wcs=data.wcs,
        psfs=data.psfs,
        weights=data.weights,
        channels=filters)
    observation = observation.match(model_frame)
    # Add sources
    from scarlet.initialization import build_initialization_coadd
    # Filtered coadd removes noise! Very useful for faint objects (but slow)
    coadd, bg_cutoff = build_initialization_coadd(
        observation, filtered_coadd=True)
    # Zero out all masked pixels in the initialization coadd.
    coadd[(seg_mask_large + seg_mask + msk_star.astype(bool))] = 0.0
    sources = []
    src = obj_cat_ori[cen_indx]
    if zp - 2.5 * np.log10(src['flux']) > 26.:
        # If too faint, single component
        new_source = scarlet.source.SingleExtendedSource(model_frame, (src['ra'], src['dec']),
                                                         observation,
                                                         thresh=0.001,
                                                         shifting=False,
                                                         coadd=coadd,
                                                         coadd_rms=bg_cutoff)
    else:
        new_source = scarlet.source.MultiExtendedSource(model_frame, (src['ra'], src['dec']),
                                                        observation,
                                                        K=2,  # Two components
                                                        thresh=0.001,
                                                        shifting=False,
                                                        coadd=coadd,
                                                        coadd_rms=bg_cutoff)
    sources.append(new_source)
    # Visualize our data and mask and source
    if not os.path.isdir('./Figures'):
        os.mkdir('./Figures/')
    fig = kz.display.display_scarlet_sources(
        data,
        sources,
        show_ind=None,
        stretch=1,
        Q=1,
        minimum=-0.3,
        show_mark=True,
        scale_bar_length=10,
        add_text=f'{prefix}-{index}')
    plt.savefig(
        f'./Figures/{prefix}-{index:04d}-img-sing.png', bbox_inches='tight')
    # Star fitting!
    start = time.time()
    blend = scarlet.Blend(sources, observation)
    fig = kz.display.display_scarlet_model(
        blend,
        zoomin_size=50,
        minimum=-0.3,
        stretch=1,
        channels='griz',
        show_loss=True,
        show_mask=True,
        show_mark=True,
        scale_bar=False)
    plt.savefig(
        f'./Figures/{prefix}-{index:04d}-init-sing.png', bbox_inches='tight')
    try:
        blend.fit(150, 1e-4)
        with open(f'./Models/MockGalScarlet/{prefix}-{index:04d}-trained-model-sing.pkl', 'wb') as fp:
            pickle.dump([blend, {'e_rel': 1e-4, 'loss': blend.loss[-1]}], fp)
            fp.close()  # NOTE(review): redundant — the `with` block already closes fp
        last_loss = blend.loss[-1]
        print(
            f'Succeed for e_rel = 1e-4 with {len(blend.loss)} iterations! Try higher accuracy!')
        # Refit at a ladder of relative-error tolerances, checkpointing
        # whenever the fit improves on the previously saved model.
        for i, e_rel in enumerate([5e-4, 1e-5, 5e-5, 1e-6]):
            blend.fit(150, e_rel)
            if len(blend.loss) > 50:  # must have more than 50 iterations
                recent_loss = np.mean(blend.loss[-10:])
                min_loss = np.min(blend.loss[:-10])
                if recent_loss < min_loss:
                    print(
                        f'Succeed for e_rel = {e_rel} with {len(blend.loss)} iterations! Try higher accuracy!')
                    with open(f'./Models/MockGalScarlet/{prefix}-{index:04d}-trained-model-sing.pkl', 'wb') as fp:
                        pickle.dump(
                            [blend, {'e_rel': e_rel, 'loss': blend.loss[-1]}], fp)
                        fp.close()
                elif abs((recent_loss - min_loss) / min_loss) < 0.02:
                    if recent_loss < last_loss:  # better than the saved model
                        print(
                            f'I am okay with relative loss difference = {abs((recent_loss - min_loss) / min_loss)}. Fitting stopped.')
                        with open(f'./Models/MockGalScarlet/{prefix}-{index:04d}-trained-model-sing.pkl', 'wb') as fp:
                            pickle.dump(
                                [blend, {'e_rel': e_rel, 'loss': blend.loss[-1]}], fp)
                            fp.close()
                        break
                else:
                    print(
                        f'Cannot achieve a global optimization with e_rel = {e_rel}.')
        # NOTE(review): positional argument 0 (`e_rel`) is never referenced by
        # this template — only {1} and {2} are used.
        print("Scarlet ran for {1} iterations to logL = {2}".format(
            e_rel, len(blend.loss), -blend.loss[-1]))
        end = time.time()
        print(f'Elapsed time for fitting: {end - start} s')
        with open(f'./Models/MockGalScarlet/{prefix}-{index:04d}-trained-model-sing.pkl', 'wb') as fp:
            pickle.dump([blend, {'e_rel': e_rel, 'loss': blend.loss[-1]}], fp)
            fp.close()
        fig = kz.display.display_scarlet_model(
            blend,
            minimum=-0.3,
            stretch=1,
            channels='griz',
            show_loss=True,
            show_mask=True,
            show_mark=False,
            scale_bar=False)
        plt.savefig(
            f'./Figures/{prefix}-{index:04d}-fitting-sing.png', bbox_inches='tight')
        fig = kz.display.display_scarlet_model(
            blend,
            zoomin_size=50,
            minimum=-0.3,
            stretch=1,
            channels='griz',
            show_loss=True,
            show_mask=True,
            show_mark=False,
            scale_bar=False)
        plt.savefig(
            f'./Figures/{prefix}-{index:04d}-zoomin-sing.png', bbox_inches='tight')
        return blend
    except Exception as e:
        # Broad catch: report the failure and hand back the (partially
        # fitted) blend instead of raising.
        print(e)
        return blend
def fitting_less_comp_mockgal(index=0, prefix='MockLSBG', large_away_factor=3.0, compact_away_factor=0.4, zp=HSC_zeropoint):
    '''
    Fit a mock galaxy with scarlet using fewer components ("less comp"):
    one (or two-component) extended source for the target plus point/extended
    sources only for compact objects near the target.

    Pipeline (as implemented below):
      1. Load the mock galaxy from ``./Models/MockGalModel/{prefix}-{index:04d}.pkl``.
      2. Build masks: GAIA bright stars, compact high-frequency sources,
         and large bright contaminants; zero out their weights.
      3. Initialize scarlet sources and run ``blend.fit`` with progressively
         tighter ``e_rel``, saving the model pickle each time it improves.
      4. Decide which components belong to the target galaxy by SED color
         distance, then save figures and the final model.

    Parameters
    ----------
    index : int
        Sequence number of the mock galaxy; used in all input/output paths.
    prefix : str
        Filename prefix for input model and all outputs.
    large_away_factor : float
        Ellipse blow-up factor protecting the target from the "large object" mask.
    compact_away_factor : float
        NOTE(review): appears unused in this body — TODO confirm against callers.
    zp : float
        Photometric zeropoint. NOTE(review): the body uses the module-level
        ``HSC_zeropoint`` directly rather than this parameter — TODO confirm.

    Returns
    -------
    blend : scarlet.Blend
        The fitted (or last attempted) blend; also returned if fitting raises.
    '''
    clear_output()
    kz.utils.set_env(project='HSC', name='HSC_LSBG')
    index = index  # no-op; kept as-is
    from kuaizi.mock import MockGal
    mgal = MockGal.read(f'./Models/MockGalModel/{prefix}-{index:04d}.pkl')
    print(f'Opening ./Models/MockGalModel/{prefix}-{index:04d}.pkl')
    channels = mgal.channels
    channels_list = list(channels)
    filters = channels_list
    lsbg_coord = SkyCoord(
        ra=mgal.model.info['ra'], dec=mgal.model.info['dec'], unit='deg')
    # Reconstruct the data: images, WCS, inverse-variance weights, PSFs
    images = mgal.mock.images
    w = mgal.mock.wcs
    weights = 1 / mgal.mock.variances
    psfs = scarlet.ImagePSF(np.array(mgal.mock.psfs))
    data = Data(images=images, weights=weights,
                wcs=w, psfs=psfs, channels=channels)
    _, msk_star = kz.utils.gaia_star_mask(  # Generate a mask for GAIA bright stars
        data.images.mean(axis=0),  # averaged image
        w,
        pixel_scale=HSC_pixel_scale,
        gaia_bright=19.5,
        mask_a=694.7,
        mask_b=3.8,
        factor_b=1.0,
        factor_f=0.6)
    # This vanilla detection with very low sigma finds out where is the central object and its footprint
    obj_cat_ori, segmap_ori, bg_rms = kz.detection.makeCatalog(
        [data],
        mask=msk_star,
        lvl=1.2,
        method='vanilla',
        convolve=False,
        match_gaia=False,
        show_fig=True,
        visual_gaia=False,
        b=128,
        f=3,
        pixel_scale=0.168,
        minarea=20,
        deblend_nthresh=30,
        deblend_cont=0.08,
        sky_subtract=True)
    # Central object = catalog entry nearest (on sky) to the mock galaxy position
    catalog_c = SkyCoord(obj_cat_ori['ra'], obj_cat_ori['dec'], unit='deg')
    dist = lsbg_coord.separation(catalog_c)
    cen_indx = obj_cat_ori[np.argsort(dist)[0]]['index']
    cen_obj = obj_cat_ori[cen_indx]
    # print(f'# Central object is #{cen_indx}.')
    # Better position for cen_obj: windowed centroid, then refresh sky coords
    x, y, _ = sep.winpos(data.images.mean(
        axis=0), cen_obj['x'], cen_obj['y'], 6)
    # x, y = cen_obj['x'], cen_obj['y']
    ra, dec = data.wcs.wcs_pix2world(x, y, 0)
    cen_obj['x'] = x
    cen_obj['y'] = y
    cen_obj['ra'] = ra
    cen_obj['dec'] = dec
    # This detection (after blurring the original images) finds out what is the central object and its (estimated) size
    obj_cat, segmap_conv, bg_rms = kz.detection.makeCatalog(
        [data],
        mask=msk_star,
        lvl=8,
        method='wavelet',
        convolve=False,
        wavelet_lvl=5,
        low_freq_lvl=3,
        high_freq_lvl=0,
        match_gaia=False,
        show_fig=True,
        visual_gaia=False,
        b=128,
        f=3,
        pixel_scale=0.168,
        minarea=20,
        deblend_nthresh=30,
        deblend_cont=0.01,
        sky_subtract=True)
    catalog_c = SkyCoord(obj_cat['ra'], obj_cat['dec'], unit='deg')
    dist = lsbg_coord.separation(catalog_c)
    # NOTE(review): `dist` ranks rows of `obj_cat` (wavelet catalog) but the rank
    # is used to index `obj_cat_ori` — looks like a copy-paste slip; only correct
    # if both catalogs happen to align. TODO confirm intended behavior.
    cen_indx_conv = obj_cat_ori[np.argsort(dist)[0]]['index']
    # This step masks out HIGH FREQUENCY sources after wavelet transformation
    obj_cat, segmap, bg_rms = kz.detection.makeCatalog([data],
                                                       mask=msk_star,
                                                       lvl=2.5,
                                                       method='wavelet',
                                                       high_freq_lvl=3,
                                                       wavelet_lvl=4,
                                                       match_gaia=False,
                                                       show_fig=True,
                                                       visual_gaia=False,
                                                       b=32,
                                                       f=3,
                                                       pixel_scale=HSC_pixel_scale,
                                                       minarea=5,
                                                       deblend_nthresh=30,
                                                       deblend_cont=0.05,
                                                       sky_subtract=True)
    # the footprint of central object: an ellipse with 4 * a and 4 * b
    footprint = np.zeros_like(segmap, dtype=bool)
    sep.mask_ellipse(footprint, cen_obj['x'], cen_obj['y'],
                     cen_obj['a'], cen_obj['b'], cen_obj['theta'], r=4.0)
    inside_flag = [footprint[item] for item in list(
        zip(obj_cat['y'].astype(int), obj_cat['x'].astype(int)))]
    for ind in np.where(inside_flag)[0]:
        # we do not mask compact sources that are nearby to the center of target galaxy
        segmap[segmap == ind + 1] = 0
    obj_cat_cpct = obj_cat[inside_flag]  # catalog of compact sources
    # Grow the remaining segmentation map into a smooth mask
    smooth_radius = 2
    gaussian_threshold = 0.03
    mask_conv = np.copy(segmap)
    mask_conv[mask_conv > 0] = 1
    mask_conv = convolve(mask_conv.astype(
        float), Gaussian2DKernel(smooth_radius))
    # This `seg_mask` only masks compact sources
    seg_mask = (mask_conv >= gaussian_threshold)
    # This step masks out bright and large contamination, which is not well-masked in previous step
    obj_cat, segmap, bg_rms = kz.detection.makeCatalog(
        [data],
        lvl=10,  # relatively aggressive threshold
        method='vanilla',
        match_gaia=False,
        show_fig=True,
        visual_gaia=False,
        b=40,
        f=3,
        pixel_scale=0.168,
        minarea=20,  # only want large things
        deblend_nthresh=30,
        deblend_cont=0.001,
        sky_subtract=True)
    catalog_c = SkyCoord(obj_cat['ra'], obj_cat['dec'], unit='deg')
    dist = lsbg_coord.separation(catalog_c)
    # Protect an ellipse around the target: large objects inside it stay unmasked
    arr = np.zeros_like(segmap).astype('uint8')
    sep.mask_ellipse(
        arr,
        cen_obj['x'],
        cen_obj['y'],
        cen_obj['a'],
        cen_obj['b'],
        cen_obj['theta'],
        r=large_away_factor)  # don't mask the target galaxy too much
    for ind, obj in enumerate(obj_cat):
        if arr[int(obj['y']), int(obj['x'])] == 1:
            segmap[segmap == ind + 1] = 0
    smooth_radius = 5
    gaussian_threshold = 0.01
    mask_conv = np.copy(segmap)
    mask_conv[mask_conv > 0] = 1
    mask_conv = convolve(mask_conv.astype(
        float), Gaussian2DKernel(smooth_radius))
    # This `seg_mask_large` masks large bright sources
    seg_mask_large = (mask_conv >= gaussian_threshold)
    # Set weights of masked pixels to zero (in place, per band)
    for layer in data.weights:
        layer[msk_star.astype(bool)] = 0
        layer[seg_mask.astype(bool)] = 0
        layer[seg_mask_large.astype(bool)] = 0
    # Remove objects that are masked from the compact obj catalog
    catalog_c = SkyCoord(obj_cat_cpct['ra'], obj_cat_cpct['dec'], unit='deg')
    dist = lsbg_coord.separation(catalog_c)
    obj_cat_cpct.remove_rows(np.where(dist < 2 * u.arcsec)[0])
    inside_flag = [
        seg_mask_large[item] for item in list(
            zip(obj_cat_cpct['y'].astype(int), obj_cat_cpct['x'].astype(int)))
    ]
    obj_cat_cpct.remove_rows(np.where(inside_flag)[0])
    # Construct `scarlet` frames and observation
    from functools import partial
    model_psf = scarlet.GaussianPSF(sigma=(0.8,) * len(filters))
    model_frame = scarlet.Frame(
        data.images.shape,
        wcs=w,
        psfs=model_psf,
        channels=filters)
    observation = scarlet.Observation(
        data.images,
        wcs=data.wcs,
        psfs=data.psfs,
        weights=data.weights,
        channels=filters)
    observation = observation.match(model_frame)
    # Add sources
    from scarlet.initialization import build_initialization_coadd
    # Filtered coadd removes noise! Very useful for faint objects (but slow)
    coadd, bg_cutoff = build_initialization_coadd(
        observation, filtered_coadd=True)
    # Zero the coadd wherever anything is masked
    coadd[(seg_mask_large + seg_mask + msk_star.astype(bool))] = 0.0
    sources = []
    src = obj_cat_ori[cen_indx]
    # Choose single vs. two-component model for the target by its magnitude
    if HSC_zeropoint - 2.5 * np.log10(src['flux']) > 26.:
        # If too faint, single component
        new_source = scarlet.source.SingleExtendedSource(model_frame, (src['ra'], src['dec']),
                                                         observation,
                                                         thresh=0.005,
                                                         shifting=False,
                                                         coadd=coadd,
                                                         coadd_rms=bg_cutoff)
    else:
        new_source = scarlet.source.MultiExtendedSource(model_frame, (src['ra'], src['dec']),
                                                        observation,
                                                        K=2,  # Two components
                                                        thresh=0.01,
                                                        shifting=False,
                                                        coadd=coadd,
                                                        coadd_rms=bg_cutoff)
    sources.append(new_source)
    for k, src in enumerate(obj_cat_cpct):  # compact sources
        if src['fwhm_custom'] < 5:  # src['b'] / src['a'] > 0.9 and
            new_source = scarlet.source.PointSource(
                model_frame, (src['ra'], src['dec']), observation)
        else:
            try:
                new_source = scarlet.source.SingleExtendedSource(
                    model_frame, (src['ra'], src['dec']), observation, coadd=coadd, coadd_rms=bg_cutoff)
            except:
                # fall back to default initialization if coadd-based init fails
                new_source = scarlet.source.SingleExtendedSource(
                    model_frame, (src['ra'], src['dec']), observation)
        sources.append(new_source)
    # Visualize our data and mask and source
    if not os.path.isdir('./Figures'):
        os.mkdir('./Figures/')
    fig = kz.display.display_scarlet_sources(
        data,
        sources,
        show_ind=None,
        stretch=1,
        Q=1,
        minimum=-0.3,
        show_mark=True,
        scale_bar_length=10,
        add_text=f'{prefix}-{index}')
    plt.savefig(
        f'./Figures/{prefix}-{index:04d}-img-less.png', bbox_inches='tight')
    # Start fitting!
    start = time.time()
    blend = scarlet.Blend(sources, observation)
    fig = kz.display.display_scarlet_model(
        blend,
        zoomin_size=50,
        minimum=-0.3,
        stretch=1,
        channels='griz',
        show_loss=True,
        show_mask=True,
        show_mark=True,
        scale_bar=False)
    plt.savefig(
        f'./Figures/{prefix}-{index:04d}-init-less.png', bbox_inches='tight')
    try:
        # First pass at loose tolerance; checkpoint the model immediately
        blend.fit(150, 1e-4)
        with open(f'./Models/MockGalScarlet/{prefix}-{index:04d}-trained-model-less.pkl', 'wb') as fp:
            pickle.dump([blend, {'e_rel': 1e-4, 'loss': blend.loss[-1]}], fp)
            fp.close()
        last_loss = blend.loss[-1]
        print(
            f'Succeed for e_rel = 1e-4 with {len(blend.loss)} iterations! Try higher accuracy!')
        # Progressively tighter tolerances; re-save whenever the fit improves
        for i, e_rel in enumerate([5e-4, 1e-5, 5e-5, 1e-6]):
            blend.fit(150, e_rel)
            if len(blend.loss) > 50:  # must have more than 50 iterations
                recent_loss = np.mean(blend.loss[-10:])
                min_loss = np.min(blend.loss[:-10])
                if recent_loss < min_loss:
                    print(
                        f'Succeed for e_rel = {e_rel} with {len(blend.loss)} iterations! Try higher accuracy!')
                    with open(f'./Models/MockGalScarlet/{prefix}-{index:04d}-trained-model-less.pkl', 'wb') as fp:
                        pickle.dump(
                            [blend, {'e_rel': e_rel, 'loss': blend.loss[-1]}], fp)
                        fp.close()
                elif abs((recent_loss - min_loss) / min_loss) < 0.02:
                    if recent_loss < last_loss:  # better than the saved model
                        print(
                            f'I am okay with relative loss difference = {abs((recent_loss - min_loss) / min_loss)}. Fitting stopped.')
                        with open(f'./Models/MockGalScarlet/{prefix}-{index:04d}-trained-model-less.pkl', 'wb') as fp:
                            pickle.dump(
                                [blend, {'e_rel': e_rel, 'loss': blend.loss[-1]}], fp)
                            fp.close()
                        break
            else:
                print(
                    f'Cannot achieve a global optimization with e_rel = {e_rel}.')
        print("Scarlet ran for {1} iterations to logL = {2}".format(
            e_rel, len(blend.loss), -blend.loss[-1]))
        end = time.time()
        print(f'Elapsed time for fitting: {end - start} s')
        # with open(f"./Models/{prefix}-{index:04d}-trained-model.pkl", "rb") as fp:
        #     blend = pickle.load(fp)[0]
        #     fp.close()
        # Find out what composes the galaxy: pick components whose SED color
        # is close to the central component's
        if len(blend.sources) > 1:
            mag_mat = np.array(
                [-2.5 * np.log10(kz.measure.flux(src)) + 27 for src in sources])
            # g - r, g - i, g - z
            color_mat = (- mag_mat + mag_mat[:, 0][:, np.newaxis])[:, 1:]
            color_dist = np.linalg.norm(
                color_mat - color_mat[0], axis=1) / np.linalg.norm(color_mat[0])
            # np.argsort(color_dist)[:] #
            sed_ind = np.where(color_dist < 0.2)[0]
            dist = np.array([
                np.linalg.norm(
                    src.center - blend.sources[0].center) * HSC_pixel_scale
                for src in np.array(blend.sources)[sed_ind]
            ])
            dist_flag = (
                dist < 3 * np.sqrt(cen_obj['a'] * cen_obj['b']) * HSC_pixel_scale)
            # NOTE(review): `dist_flag` is computed but not applied below
            # (unlike the wavelet version) — TODO confirm intentional.
            point_flag = np.array([
                isinstance(src, scarlet.source.PointSource)
                for src in np.array(blend.sources)[sed_ind]
            ])
            near_cen_flag = [
                (segmap_conv == cen_indx_conv +
                 1)[int(src.center[1]), int(src.center[0])]
                for src in np.array(blend.sources)[sed_ind]
            ]
            sed_ind = sed_ind[(~point_flag) & near_cen_flag]
            if not 0 in sed_ind:
                # the central source must be included.
                sed_ind = np.array(list(set(sed_ind).union({0})))
        else:
            sed_ind = np.array([0])
        print(f'Components {sed_ind} are considered as the target galaxy.')
        with open(f'./Models/MockGalScarlet/{prefix}-{index:04d}-trained-model-less.pkl', 'wb') as fp:
            pickle.dump(
                [blend, {'e_rel': e_rel, 'loss': blend.loss[-1]}, sed_ind], fp)
            fp.close()
        # Save full-field and zoom-in fitting figures
        fig = kz.display.display_scarlet_model(
            blend,
            minimum=-0.3,
            stretch=1,
            channels='griz',
            show_loss=True,
            show_mask=True,
            show_mark=False,
            scale_bar=False)
        plt.savefig(
            f'./Figures/{prefix}-{index:04d}-fitting-less.png', bbox_inches='tight')
        fig = kz.display.display_scarlet_model(
            blend,
            zoomin_size=50,
            minimum=-0.3,
            stretch=1,
            channels='griz',
            show_loss=True,
            show_mask=True,
            show_mark=False,
            scale_bar=False)
        plt.savefig(
            f'./Figures/{prefix}-{index:04d}-zoomin-less.png', bbox_inches='tight')
        return blend
    except Exception as e:
        # best-effort: report the failure and return whatever we have
        print(e)
        return blend
def _fitting_wavelet(data, coord, pixel_scale=HSC_pixel_scale, zp=HSC_zeropoint, starlet_thresh=0.8, prefix='mockgal',
                     index=0, model_dir='./Model', figure_dir='./Figure', show_figure=True, tigress=False):
    '''
    This is a fitting function for internal use. It fits the galaxy using a Starlet model,
    and applies a mask after fitting.

    Pipeline (as implemented below):
      1. Build a GAIA star mask, then run several detections (vanilla + wavelet)
         to locate the target, compact contaminants, and large bright objects.
      2. Guess and refine the Starlet bounding box until the contamination
         ratio inside it is acceptable.
      3. Model the target as a StarletSource; model compact/big/star neighbors
         as point or extended sources.
      4. Fit with progressively tighter ``e_rel``, checkpointing with ``dill``.
      5. Build an aggressive "footprint" mask of everything except the target,
         save the model + mask, and write diagnostic figures.

    Parameters
    ----------
    data : Data
        Container with ``images``, ``weights``, ``wcs``, ``psfs``, ``channels``.
        Weights are modified IN PLACE (masked pixels set to zero).
    coord : SkyCoord
        Sky position of the target galaxy.
    pixel_scale : float
        Pixel scale in arcsec/pixel, used for detection and masking.
    zp : float
        NOTE(review): appears unused in this body (magnitudes below use a
        hard-coded ``+ 27``) — TODO confirm.
    starlet_thresh : float
        Starlet threshold passed to ``scarlet.StarletSource`` for the target.
    prefix, index : str, int
        Used to name all model/figure outputs.
    model_dir, figure_dir : str
        Output directories (created if missing).
    show_figure : bool
        If False, figures are closed after saving.
    tigress : bool
        Forwarded to ``kz.utils.gaia_star_mask`` (Tigress environment flag).

    Returns
    -------
    blend : scarlet.Blend
        The fitted (or last attempted) blend; also returned if fitting raises.
    '''
    from scarlet import Starlet
    lsbg_coord = coord
    # 2 whitespaces before "-", i.e., 4 whitespaces before word
    print(' - Detect sources and make mask')
    print(' Query GAIA stars...')
    gaia_cat, msk_star = kz.utils.gaia_star_mask(  # Generate a mask for GAIA bright stars
        data.images.mean(axis=0),  # averaged image
        data.wcs,
        pixel_scale=pixel_scale,
        gaia_bright=19.5,
        mask_a=694.7,
        mask_b=3.8,
        factor_b=1.0,
        factor_f=1.5,
        tigress=tigress)
    # This vanilla detection with very low sigma finds out where is the central object and its footprint
    obj_cat_ori, segmap_ori, bg_rms = kz.detection.makeCatalog(
        [data],
        lvl=1.2,
        mask=msk_star,
        method='vanilla',
        convolve=False,
        match_gaia=False,
        show_fig=show_figure,
        visual_gaia=False,
        b=128,
        f=3,
        pixel_scale=pixel_scale,
        minarea=20,
        deblend_nthresh=48,
        deblend_cont=0.07,  # 0.07, I changed it to 0.1
        sky_subtract=True)
    # Central object = catalog entry nearest (on sky) to the target position
    catalog_c = SkyCoord(obj_cat_ori['ra'], obj_cat_ori['dec'], unit='deg')
    dist = lsbg_coord.separation(catalog_c)
    cen_indx_ori = obj_cat_ori[np.argsort(dist)[0]]['index']
    cen_obj = obj_cat_ori[cen_indx_ori]
    # Better position for cen_obj: windowed centroid, then refresh sky coords
    x, y, _ = sep.winpos(data.images.mean(
        axis=0), cen_obj['x'], cen_obj['y'], 6)
    ra, dec = data.wcs.wcs_pix2world(x, y, 0)
    cen_obj['x'] = x
    cen_obj['y'] = y
    cen_obj['ra'] = ra
    cen_obj['dec'] = dec
    # We roughly guess the box size of the Starlet model
    model_psf = scarlet.GaussianPSF(sigma=(0.8,) * len(data.channels))
    model_frame = scarlet.Frame(
        data.images.shape,
        wcs=data.wcs,
        psf=model_psf,
        channels=list(data.channels))
    observation = scarlet.Observation(
        data.images,
        wcs=data.wcs,
        psf=data.psfs,
        weights=data.weights,
        channels=list(data.channels))
    observation = observation.match(model_frame)
    cen_obj = obj_cat_ori[cen_indx_ori]
    starlet_source = scarlet.StarletSource(model_frame,
                                           (cen_obj['ra'], cen_obj['dec']),
                                           observation,
                                           thresh=0.001,
                                           min_grad=-0.3,  # the initial guess of box size is as large as possible
                                           starlet_thresh=5e-3)
    starlet_extent = kz.display.get_extent(
        starlet_source.bbox)  # [x1, x2, y1, y2]
    # extra enlarge: pad the box by 5 pixels on each side
    starlet_extent[0] -= 5
    starlet_extent[2] -= 5
    starlet_extent[1] += 5
    starlet_extent[3] += 5
    # Show the Starlet initial box
    fig = display_single(data.images.mean(axis=0))
    from matplotlib.patches import Rectangle
    box_kwargs = {"facecolor": "none", "edgecolor": "w", "lw": 0.5}
    rect = Rectangle(
        (starlet_extent[0], starlet_extent[2]),
        starlet_extent[1] - starlet_extent[0],
        starlet_extent[3] - starlet_extent[2],
        **box_kwargs
    )
    ax = plt.gca()
    ax.add_patch(rect)
    plt.close()
    if gaia_cat is not None:
        # Flag GAIA stars whose pixel position falls inside the Starlet box
        star_flag = [(item[0] > starlet_extent[0]) & (item[0] < starlet_extent[1]) &
                     (item[1] > starlet_extent[2]) & (
                         item[1] < starlet_extent[3])
                     for item in np.asarray(
                         data.wcs.wcs_world2pix(gaia_cat['ra'], gaia_cat['dec'], 0), dtype=int).T]
        # "star_cat" is a catalog for GAIA stars which fall in the Starlet box
        star_cat = gaia_cat[star_flag]
        _, msk_star = kz.utils.gaia_star_mask(  # Generate GAIA mask only for stars outside of the Starlet box
            data.images.mean(axis=0),
            data.wcs,
            gaia_stars=gaia_cat[~np.array(star_flag)],
            pixel_scale=pixel_scale,
            gaia_bright=19.5,
            mask_a=694.7,
            mask_b=3.8,
            factor_b=1.0,
            factor_f=0.6,
            tigress=tigress)
    else:
        star_cat = []
    # This step masks out high frequency sources by doing wavelet transformation
    obj_cat, segmap_highfreq, bg_rms = kz.detection.makeCatalog([data],
                                                                mask=msk_star,
                                                                lvl=2.,  # 2.5
                                                                method='wavelet',
                                                                high_freq_lvl=2,  # 3
                                                                wavelet_lvl=4,
                                                                match_gaia=False,
                                                                show_fig=show_figure,
                                                                visual_gaia=False,
                                                                b=24,
                                                                f=3,
                                                                pixel_scale=pixel_scale,
                                                                minarea=3,
                                                                deblend_nthresh=30,
                                                                deblend_cont=0.03,
                                                                sky_subtract=True)
    catalog_c = SkyCoord(obj_cat['ra'], obj_cat['dec'], unit='deg')
    dist = lsbg_coord.separation(catalog_c)
    cen_indx_highfreq = obj_cat[np.argsort(dist)[0]]['index']
    # Don't mask out objects that fall in the segmap of the central object and the Starlet box
    segmap = segmap_highfreq.copy()
    # overlap_flag is for objects which fall in the footprint of central galaxy in the first SEP detection
    overlap_flag = [(segmap_ori == (cen_indx_ori + 1))[item]
                    for item in list(zip(obj_cat['y'].astype(int), obj_cat['x'].astype(int)))]
    # box_flag is for objects which fall in the initial Starlet box
    box_flag = np.unique(
        segmap[starlet_extent[2]:starlet_extent[3], starlet_extent[0]:starlet_extent[1]]) - 1
    box_flag = np.delete(np.sort(box_flag), 0)
    overlap_flag = np.array(overlap_flag)
    overlap_flag[box_flag] = True
    obj_cat_cpct = obj_cat[overlap_flag]
    # Remove the source if it is the central galaxy
    if dist[cen_indx_highfreq] < 1 * u.arcsec:
        obj_cat_cpct.remove_rows(
            np.where(obj_cat_cpct['index'] == cen_indx_highfreq)[0])
    for ind in np.where(overlap_flag)[0]:
        segmap[segmap == ind + 1] = 0
    # Grow the remaining segmentation map into a smooth mask
    smooth_radius = 2
    gaussian_threshold = 0.03
    mask_conv = np.copy(segmap)
    mask_conv[mask_conv > 0] = 1
    mask_conv = convolve(mask_conv.astype(
        float), Gaussian2DKernel(smooth_radius))
    # This `seg_mask` only masks compact sources
    seg_mask = (mask_conv >= gaussian_threshold)
    # This step masks out bright and large contamination, which is not well-masked in previous step
    obj_cat, segmap_big, bg_rms = kz.detection.makeCatalog(
        [data],
        lvl=4.5,  # relatively aggressive threshold
        method='vanilla',
        match_gaia=False,
        show_fig=show_figure,
        visual_gaia=False,
        b=45,
        f=3,
        pixel_scale=pixel_scale,
        minarea=20,  # only want large things
        deblend_nthresh=30,
        deblend_cont=0.02,
        sky_subtract=True)
    catalog_c = SkyCoord(obj_cat['ra'], obj_cat['dec'], unit='deg')
    dist = lsbg_coord.separation(catalog_c)
    # NOTE(review): `dist` ranks rows of this new `obj_cat` but the rank is used
    # to index `obj_cat_ori` — looks like a copy-paste slip; only correct if the
    # catalogs happen to align. TODO confirm intended behavior.
    cen_indx_big = obj_cat_ori[np.argsort(dist)[0]]['index']
    # mask out big objects that are NOT identified in the high_freq step
    segmap = segmap_big.copy()
    box_flag = np.unique(
        segmap[starlet_extent[2]:starlet_extent[3], starlet_extent[0]:starlet_extent[1]]) - 1
    box_flag = np.delete(np.sort(box_flag), 0)
    for ind in box_flag:
        segmap[segmap == ind + 1] = 0
    box_flag = np.delete(box_flag, np.where(box_flag == cen_indx_big)[
                         0])  # dont include the central galaxy
    obj_cat_big = obj_cat[box_flag]
    smooth_radius = 4
    gaussian_threshold = 0.01
    mask_conv = np.copy(segmap)
    mask_conv[mask_conv > 0] = 1
    mask_conv = convolve(mask_conv.astype(
        float), Gaussian2DKernel(smooth_radius))
    # This `seg_mask_large` masks large bright sources
    seg_mask_large = (mask_conv >= gaussian_threshold)
    # Set weights of masked pixels to zero (in place, per band)
    for layer in data.weights:
        layer[msk_star.astype(bool)] = 0
        layer[seg_mask.astype(bool)] = 0
        layer[seg_mask_large.astype(bool)] = 0
    # Remove compact objects that are too close to the central
    catalog_c = SkyCoord(obj_cat_cpct['ra'], obj_cat_cpct['dec'], unit='deg')
    dist = lsbg_coord.separation(catalog_c)
    obj_cat_cpct.remove_rows(np.where(dist < 3 * u.arcsec)[0])
    # Remove objects that are already masked!
    inside_flag = [
        seg_mask_large[item] for item in list(
            zip(obj_cat_cpct['y'].astype(int), obj_cat_cpct['x'].astype(int)))
    ]
    obj_cat_cpct.remove_rows(np.where(inside_flag)[0])
    # Remove big objects that are toooo near to the target
    catalog_c = SkyCoord(obj_cat_big['ra'], obj_cat_big['dec'], unit='deg')
    dist = lsbg_coord.separation(catalog_c)
    obj_cat_big.remove_rows(np.where(dist < 3 * u.arcsec)[0])
    # Remove objects that are already masked!
    inside_flag = [
        (data.weights[0] == 0)[item] for item in list(
            zip(obj_cat_big['y'].astype(int), obj_cat_big['x'].astype(int)))
    ]
    obj_cat_big.remove_rows(np.where(inside_flag)[0])
    # Construct `scarlet` frames and observation
    from functools import partial
    model_psf = scarlet.GaussianPSF(sigma=(0.8,) * len(data.channels))
    model_frame = scarlet.Frame(
        data.images.shape,
        wcs=data.wcs,
        psf=model_psf,
        channels=list(data.channels))
    observation = scarlet.Observation(
        data.images,
        wcs=data.wcs,
        psf=data.psfs,
        weights=data.weights,
        channels=list(data.channels))
    observation = observation.match(model_frame)
    sources = []
    # Add central Starlet source
    src = obj_cat_ori[cen_indx_ori]
    # Find a better box, not too large, not too small: raise min_grad until the
    # contamination ratio inside the box drops below 15%
    for min_grad in np.arange(-0.3, 0.4, 0.05):
        starlet_source = scarlet.StarletSource(
            model_frame,
            (src['ra'], src['dec']),
            observation,
            thresh=0.001,
            min_grad=min_grad,
            starlet_thresh=starlet_thresh)
        starlet_extent = kz.display.get_extent(starlet_source.bbox)
        segbox = segmap_ori[starlet_extent[2]:starlet_extent[3],
                            starlet_extent[0]:starlet_extent[1]]
        contam_ratio = 1 - \
            np.sum((segbox == 0) | (segbox == cen_indx_ori + 1)) / \
            np.sum(np.ones_like(segbox))
        if contam_ratio <= 0.15:
            break
    print(f' - Wavelet modeling with the following hyperparameters:')
    print(
        f' min_grad = {min_grad:.2f}, starlet_thresh = {starlet_thresh:.2f} (contam_ratio = {contam_ratio:.2f}).'
    )
    # Recenter the Starlet source at the middle of its own bounding box ([y, x])
    starlet_source.center = (
        np.array(starlet_source.bbox.shape) // 2 + starlet_source.bbox.origin)[1:]
    sources.append(starlet_source)
    # Only model "real compact" sources
    if len(obj_cat_big) > 0:
        # remove intersection between cpct and big objects
        cpct_coor = SkyCoord(
            ra=np.array(obj_cat_cpct['ra']) * u.degree,
            dec=np.array(obj_cat_cpct['dec']) * u.degree)
        big = SkyCoord(ra=obj_cat_big['ra'] * u.degree,
                       dec=obj_cat_big['dec'] * u.degree)
        tempid, sep2d, _ = match_coordinates_sky(big, cpct_coor)
        cpct = obj_cat_cpct[np.setdiff1d(
            np.arange(len(obj_cat_cpct)), tempid[np.where(sep2d < 1 * u.arcsec)])]
    else:
        cpct = obj_cat_cpct
    if len(star_cat) > 0:
        # remove intersection between cpct and stars
        star = SkyCoord(ra=star_cat['ra'], dec=star_cat['dec'], unit='deg')
        cpct_coor = SkyCoord(
            ra=np.array(cpct['ra']) * u.degree,
            dec=np.array(cpct['dec']) * u.degree)
        tempid, sep2d, _ = match_coordinates_sky(star, cpct_coor)
        cpct = cpct[np.setdiff1d(np.arange(len(cpct)),
                                 tempid[np.where(sep2d < 1 * u.arcsec)])]
    # Compact neighbors: small FWHM -> compact source, otherwise extended
    for k, src in enumerate(cpct):
        if src['fwhm_custom'] < 5:
            new_source = scarlet.source.CompactExtendedSource(
                model_frame, (src['ra'], src['dec']), observation)
        else:
            new_source = scarlet.source.SingleExtendedSource(
                model_frame, (src['ra'], src['dec']), observation, thresh=2)
        sources.append(new_source)
    # IF GAIA stars are within the box: exclude it from the big_cat
    if len(obj_cat_big) > 0:
        if len(star_cat) > 0:
            star = SkyCoord(ra=star_cat['ra'], dec=star_cat['dec'], unit='deg')
            tempid, sep2d, _ = match_coordinates_sky(star, big)
            big_cat = obj_cat_big[np.setdiff1d(
                np.arange(len(obj_cat_big)), tempid[np.where(sep2d < 1 * u.arcsec)])]
        else:
            big_cat = obj_cat_big
        # [np.where(sep2d > 2 * u.arcsec)[0]]
        for k, src in enumerate(big_cat):
            if src['fwhm_custom'] > 15:
                new_source = scarlet.source.ExtendedSource(
                    model_frame, (src['ra'], src['dec']), observation, K=2, shifting=True)
            else:
                # try:
                new_source = scarlet.source.SingleExtendedSource(
                    model_frame, (src['ra'], src['dec']), observation, thresh=2, shifting=True)
                # except:
                #     new_source = scarlet.source.SingleExtendedSource(
                #         model_frame, (src['ra'], src['dec']), observation, coadd=coadd, coadd_rms=bg_cutoff)
            sources.append(new_source)
    # GAIA stars inside the box: two-component extended sources
    if len(star_cat) > 0:
        for k, src in enumerate(star_cat):
            new_source = scarlet.source.ExtendedSource(
                model_frame, (src['ra'], src['dec']), observation, K=2)
            sources.append(new_source)
    # Visualize our data and mask and source
    if not os.path.isdir(figure_dir):
        os.mkdir(figure_dir)
    if not os.path.isdir(model_dir):
        os.mkdir(model_dir)
    fig = kz.display.display_scarlet_sources(
        data,
        sources,
        show_ind=None,
        stretch=1,
        Q=1,
        minimum=-0.3,
        show_mark=True,
        scale_bar_length=10,
        add_text=f'{prefix}-{index}')
    plt.savefig(
        os.path.join(figure_dir, f'{prefix}-{index:04d}-src-wavelet.png'), bbox_inches='tight')
    if not show_figure:
        plt.close()
    # Start fitting!
    start = time.time()
    blend = scarlet.Blend(sources, observation)
    fig = kz.display.display_scarlet_model(
        blend,
        minimum=-0.3,
        stretch=1,
        channels='griz',
        show_loss=False,
        show_mask=False,
        show_mark=True,
        scale_bar=False)
    plt.savefig(
        os.path.join(figure_dir, f'{prefix}-{index:04d}-init-wavelet.png'), bbox_inches='tight')
    if not show_figure:
        plt.close()
    try:
        # First pass at loose tolerance; checkpoint the model immediately
        blend.fit(150, 1e-4)
        with open(os.path.join(model_dir, f'{prefix}-{index:04d}-trained-model-wavelet.df'), 'wb') as fp:
            dill.dump([blend, {'e_rel': 1e-4, 'loss': blend.loss[-1]}], fp)
            fp.close()
        last_loss = blend.loss[-1]
        print(
            f' Optimizaiton: Succeed for e_rel = 1e-4 with {len(blend.loss)} iterations! Try higher accuracy!')
        # Progressively tighter tolerances; re-save whenever the fit improves
        for i, e_rel in enumerate([5e-4, 1e-5, 5e-5, 1e-6]):
            blend.fit(150, e_rel)
            if len(blend.loss) > 50:  # must have more than 50 iterations
                recent_loss = np.mean(blend.loss[-10:])
                min_loss = np.min(blend.loss[:-10])
                if recent_loss < min_loss:
                    print(
                        f' Optimizaiton: Succeed for e_rel = {e_rel} with {len(blend.loss)} iterations! Try higher accuracy!')
                    with open(os.path.join(model_dir, f'{prefix}-{index:04d}-trained-model-wavelet.df'), 'wb') as fp:
                        dill.dump(
                            [blend, {'e_rel': e_rel, 'loss': blend.loss[-1]}], fp)
                        fp.close()
                elif abs((recent_loss - min_loss) / min_loss) < 0.02:
                    if recent_loss < last_loss:  # better than the saved model
                        print(
                            f' Optimizaiton: I am okay with relative loss difference = {abs((recent_loss - min_loss) / min_loss)}. Fitting stopped.')
                        with open(os.path.join(model_dir, f'{prefix}-{index:04d}-trained-model-wavelet.df'), 'wb') as fp:
                            dill.dump(
                                [blend, {'e_rel': e_rel, 'loss': blend.loss[-1]}], fp)
                            fp.close()
                        break
            else:
                print(
                    f' ! Optimizaiton: Cannot achieve a global optimization with e_rel = {e_rel}.')
        print(" - After {1} iterations, logL = {2}".format(
            e_rel, len(blend.loss), -blend.loss[-1]))
        end = time.time()
        print(f' Elapsed time for fitting: {end - start} s')
        # In principle, Now we don't need to find which components compose a galaxy. The central Starlet is enough!
        if len(blend.sources) > 1:
            mag_mat = np.array(
                [-2.5 * np.log10(kz.measure.flux(src, observation)) + 27 for src in sources])
            # g - r, g - i, g - z
            color_mat = (- mag_mat + mag_mat[:, 0][:, np.newaxis])[:, 1:]
            color_dist = np.linalg.norm(
                color_mat - color_mat[0], axis=1) / np.linalg.norm(color_mat[0])
            sed_ind = np.where(color_dist < 0.1)[0]
            dist = np.array([
                np.linalg.norm(
                    src.center - blend.sources[0].center) * HSC_pixel_scale
                for src in np.array(blend.sources)[sed_ind]
            ])
            dist_flag = (
                dist < 3 * np.sqrt(cen_obj['a'] * cen_obj['b']) * HSC_pixel_scale)
            # maybe use segmap flag? i.e., include objects that are overlaped
            # with the target galaxy in the inital detection.
            point_flag = np.array([
                isinstance(src, scarlet.source.PointSource)
                for src in np.array(blend.sources)[sed_ind]
            ])  # we don't want point source
            near_cen_flag = [
                (segmap_ori == cen_indx_ori +
                 1)[int(src.center[0]), int(src.center[1])]  # src.center: [y, x]
                for src in np.array(blend.sources)[sed_ind]
            ]
            sed_ind = sed_ind[(~point_flag) & near_cen_flag & dist_flag]
            if not 0 in sed_ind:
                # the central source must be included.
                sed_ind = np.array(list(set(sed_ind).union({0})))
        else:
            sed_ind = np.array([0])
        print(
            f' - Components {sed_ind} are considered as the target galaxy.')
        # Only mask bright stars!!!
        print(' - Masking stars and other sources that are modeled, to deal with leaky flux issue.')
        # Generate a VERY AGGRESSIVE mask, named "footprint"
        footprint = np.zeros_like(segmap_highfreq, dtype=bool)
        # for ind in cpct['index']: # mask ExtendedSources which are modeled
        # footprint[segmap_highfreq == ind + 1] = 1
        # footprint[segmap_highfreq == cen_indx_highfreq + 1] = 0
        sed_ind_pix = np.array([item.center for item in np.array(
            sources)[sed_ind]]).astype(int)  # the y and x of sed_ind objects
        # # if any objects in `sed_ind` is in `segmap_highfreq`
        # sed_corr_indx = segmap_highfreq[sed_ind_pix[:, 0], sed_ind_pix[:, 1]]
        # for ind in sed_corr_indx:
        # footprint[segmap_highfreq == ind] = 0
        # smooth_radius = 1.5
        # gaussian_threshold = 0.03
        # mask_conv = np.copy(footprint)
        # mask_conv[mask_conv > 0] = 1
        # mask_conv = convolve(mask_conv.astype(
        # float), Gaussian2DKernel(smooth_radius))
        # footprint = (mask_conv >= gaussian_threshold)
        # Mask star within the box
        if len(star_cat) > 0:
            _, star_mask = kz.utils.gaia_star_mask(  # Generate GAIA mask only for stars outside of the Starlet box
                data.images.mean(axis=0),
                data.wcs,
                gaia_stars=star_cat,
                pixel_scale=0.168,
                gaia_bright=18.,
                mask_a=694.7,
                mask_b=3.8,
                factor_b=1.0,
                factor_f=2.0)
            footprint = footprint | star_mask
        # Mask big objects from `big_cat`
        if len(obj_cat_big) > 0:
            # Blow-up radius depends on the distance to target galaxy
            catalog_c = SkyCoord(big_cat['ra'], big_cat['dec'], unit='deg')
            dist = lsbg_coord.separation(catalog_c)
            near_flag = (dist < 4 * cen_obj['a'] * HSC_pixel_scale * u.arcsec)
            footprint2 = np.zeros_like(segmap_big, dtype=bool)
            # mask ExtendedSources which are modeled
            for ind in big_cat[near_flag]['index']:
                footprint2[segmap_big == ind + 1] = 1
            # if any objects in `sed_ind` is in `segmap_big`
            sed_corr_indx = segmap_big[sed_ind_pix[:, 0], sed_ind_pix[:, 1]]
            for ind in sed_corr_indx:
                footprint2[segmap_big == ind] = 0
            footprint2[segmap_big == cen_indx_big + 1] = 0
            # Tight blow-up for nearby big objects
            smooth_radius = 1.5
            gaussian_threshold = 0.1
            mask_conv = np.copy(footprint2)
            mask_conv[mask_conv > 0] = 1
            mask_conv = convolve(mask_conv.astype(
                float), Gaussian2DKernel(smooth_radius))
            footprint2 = (mask_conv >= gaussian_threshold)
            footprint3 = np.zeros_like(segmap_big, dtype=bool)
            # mask ExtendedSources which are modeled
            for ind in big_cat[~near_flag]['index']:
                footprint3[segmap_big == ind + 1] = 1
            # Generous blow-up for far-away big objects
            smooth_radius = 5
            gaussian_threshold = 0.01
            mask_conv = np.copy(footprint3)
            mask_conv[mask_conv > 0] = 1
            mask_conv = convolve(mask_conv.astype(
                float), Gaussian2DKernel(smooth_radius))
            footprint3 = (mask_conv >= gaussian_threshold)
            footprint2 += footprint3  # combine together
            # # if any objects in `sed_ind` is in `segmap_big`
            # sed_corr_indx = segmap_big[sed_ind_pix[:, 0], sed_ind_pix[:, 1]]
            # for ind in sed_corr_indx:
            # footprint2[segmap_big == ind] = 0
            # footprint2[segmap_big == cen_indx_big + 1] = 0
            # This is the mask for everything except target galaxy
            footprint = footprint + footprint2
        outdir = os.path.join(
            model_dir, f'{prefix}-{index:04d}-trained-model-wavelet.df')
        print(f' - Saving the results as {os.path.abspath(outdir)}')
        with open(os.path.abspath(outdir), 'wb') as fp:
            dill.dump(
                [blend, {'e_rel': e_rel, 'loss': blend.loss[-1], 'sed_ind': sed_ind}, footprint], fp)
            fp.close()
        # Save fitting figure
        fig = kz.display.display_scarlet_model(
            blend,
            minimum=-0.3,
            stretch=1,
            channels='griz',
            show_loss=True,
            show_mask=False,
            show_mark=False,
            scale_bar=False)
        plt.savefig(
            os.path.join(figure_dir, f'{prefix}-{index:04d}-fitting-wavelet.png'), bbox_inches='tight')
        if not show_figure:
            plt.close()
        # Save zoomin figure (non-agressively-masked, target galaxy only)
        fig = kz.display.display_scarlet_model(
            blend,
            show_ind=sed_ind,
            zoomin_size=50,
            minimum=-0.3,
            stretch=1,
            channels='griz',
            show_loss=True,
            show_mask=False,
            show_mark=False,
            scale_bar=False)
        plt.savefig(
            os.path.join(figure_dir, f'{prefix}-{index:04d}-zoomin-wavelet.png'), bbox_inches='tight')
        if not show_figure:
            plt.close()
        # Save zoomin figure (aggressively-masked, target galaxy only):
        # rebuild the observation with footprint pixels zero-weighted
        new_weights = data.weights.copy()
        for layer in new_weights:
            layer[footprint.astype(bool)] = 0
        observation2 = scarlet.Observation(
            data.images,
            wcs=data.wcs,
            psf=data.psfs,
            weights=new_weights,
            channels=list(data.channels))
        observation2 = observation2.match(model_frame)
        blend2 = scarlet.Blend(sources, observation2)
        fig = kz.display.display_scarlet_model(
            blend2,
            show_ind=sed_ind,
            zoomin_size=50,
            minimum=-0.3,
            stretch=1,
            channels='griz',
            show_loss=False,
            show_mask=True,
            show_mark=False,
            scale_bar=False)
        plt.savefig(
            os.path.join(figure_dir, f'{prefix}-{index:04d}-zoomin-mask-wavelet.png'), bbox_inches='tight')
        if not show_figure:
            plt.close()
        # # Save high-freq-removed figure
        # ## remove high-frequency features from the Starlet objects
        # for src in np.array(blend2.sources)[sed_ind]:
        #     if isinstance(src, scarlet.StarletSource):
        #         # Cutout a patch of original image
        #         y_cen, x_cen = np.array(src.bbox.shape)[1:] // 2 + np.array(src.bbox.origin)[1:]
        #         size = np.array(src.bbox.shape)[1:] // 2
        #         img_ = observation.data[:, y_cen - size[0]:y_cen + size[0] + 1, x_cen - size[1]:x_cen + size[1] + 1]
        #         morph = src.children[1]
        #         stlt = Starlet(morph.get_model(), direct=True)
        #         c = stlt.coefficients
        #         c[:, :2, :, :] = 0 # Remove high-frequency features
        #         new_morph = copy.deepcopy(morph)
        #         new_src = copy.deepcopy(src)
        #         new_morph.__init__(morph.frame, img_, coeffs=c, bbox=morph.bbox)
        #         src.children[1] = new_morph
        # fig = kz.display.display_scarlet_model(
        #     blend2,
        #     show_ind=sed_ind,
        #     zoomin_size=50,
        #     minimum=-0.3,
        #     stretch=1,
        #     channels='griz',
        #     show_loss=False,
        #     show_mask=True,
        #     show_mark=False,
        #     scale_bar=False)
        # plt.savefig(
        #     os.path.join(figure_dir, f'{prefix}-{index:04d}-zoomin-blur-wavelet.png'), bbox_inches='tight')
        # if not show_figure:
        #     plt.close()
        return blend
    except Exception as e:
        # best-effort: report the failure and return whatever we have
        print(e)
        return blend
def fitting_wavelet_observation(lsbg, hsc_dr, cutout_halfsize=1.0, starlet_thresh=0.8, prefix='LSBG', pixel_scale=HSC_pixel_scale,
                                zp=HSC_zeropoint, model_dir='./Models', figure_dir='./Figure', show_figure=False):
    """Download an HSC cutout and PSFs for one LSBG, then run scarlet wavelet fitting.

    Parameters
    ----------
    lsbg : table row or mapping
        Must provide the keys ``'Seq'``, ``'RAJ2000'`` and ``'DEJ2000'``.
    hsc_dr : object
        HSC data-release archive handle forwarded to ``hsc_cutout`` / ``hsc_psf``.
    cutout_halfsize : float
        Half-size of the cutout in arcmin.
    starlet_thresh, prefix, pixel_scale, zp, model_dir, figure_dir, show_figure :
        Forwarded to ``_fitting_wavelet``.

    Returns
    -------
    The scarlet blend object returned by ``_fitting_wavelet``.
    """
    clear_output()
    from kuaizi.utils import padding_PSF
    kz.utils.set_env(project='HSC', name='HSC_LSBG')
    # kz.utils.set_matplotlib(usetex=False, fontsize=15)
    index = lsbg['Seq']
    lsbg_coord = SkyCoord(ra=lsbg['RAJ2000'], dec=lsbg['DEJ2000'], unit='deg')

    # Ensure the output directories for downloaded images/PSFs exist.
    img_dir = './Images/'
    psf_dir = './PSFs/'
    for directory in (img_dir, psf_dir):
        if not os.path.isdir(directory):
            os.mkdir(directory)

    size_ang = cutout_halfsize * u.arcmin
    channels = 'griz'
    cutout = hsc_cutout(
        lsbg_coord,
        cutout_size=size_ang,
        filters=channels,
        mask=True,
        variance=True,
        archive=hsc_dr,
        use_saved=True,
        save_output=True,
        output_dir=img_dir,
        prefix=f'LSBG_{index:04d}_img')  # {prefix}
    psf_list = hsc_psf(
        lsbg_coord,
        centered=True,
        filters=channels,
        img_type='coadd',
        verbose=True,
        archive=hsc_dr,
        use_saved=True,
        save_output=True,
        output_dir=psf_dir,
        prefix=f'LSBG_{index:04d}_psf')

    # Reconstruct data: science image from HDU 1, inverse-variance weights from HDU 3.
    # (Removed the unused `channels_list` / `filters` locals from the original.)
    images = np.array([hdu[1].data for hdu in cutout])
    w = wcs.WCS(cutout[0][1].header)  # note: all bands share the same WCS here
    weights = 1 / np.array([hdu[3].data for hdu in cutout])
    psf_pad = padding_PSF(psf_list)  # Padding PSF cutouts from HSC
    psfs = scarlet.ImagePSF(np.array(psf_pad))
    data = Data(images=images, weights=weights,
                wcs=w, psfs=psfs, channels=channels)

    blend = _fitting_wavelet(
        data, lsbg_coord, starlet_thresh=starlet_thresh, prefix=prefix, index=index, pixel_scale=pixel_scale,
        zp=zp, model_dir=model_dir, figure_dir=figure_dir, show_figure=show_figure)
    return blend
def fitting_wavelet_obs_tigress(lsbg, starlet_thresh=0.8, prefix='LSBG', pixel_scale=HSC_pixel_scale,
                                zp=HSC_zeropoint, model_dir='./Model', figure_dir='./Figure', show_figure=False):
    """Run scarlet wavelet modeling for one LSBG whose cutouts already live on disk (Tigress).

    Loads per-band image and PSF FITS files named ``{lsbg['prefix']}_{band}.fits``
    and ``{lsbg['prefix']}_{band}_psf.fits``, assembles a ``Data`` object, and
    delegates the actual fitting to ``_fitting_wavelet`` with ``tigress=True``.
    """
    clear_output()
    from kuaizi.utils import padding_PSF
    from kuaizi.mock import Data

    kz.utils.set_env(project='HSC', name='LSBG',
                     data_dir='/home/jiaxuanl/Data')
    index = lsbg['Seq']
    channels = 'griz'
    print(f'### Running scarlet wavelet modeling for {prefix}-{index}')

    # Target coordinate; useful for query GAIA.
    lsbg_coord = SkyCoord(ra=lsbg['RAJ2000'], dec=lsbg['DEJ2000'], unit='deg')

    cutout = [fits.open(f"{lsbg['prefix']}_{band}.fits") for band in channels]
    psf_list = [fits.open(f"{lsbg['prefix']}_{band}_psf.fits") for band in channels]

    # Rebuild the observation: science image from HDU 1, variance from HDU 3.
    images = np.array([hdu[1].data for hdu in cutout])
    img_wcs = wcs.WCS(cutout[0][1].header)  # note: all bands share the same WCS here
    weights = 1 / np.array([hdu[3].data for hdu in cutout])
    padded_psfs = scarlet.ImagePSF(np.array(padding_PSF(psf_list)))
    data = Data(images=images, weights=weights, wcs=img_wcs,
                psfs=padded_psfs, channels=channels)

    return _fitting_wavelet(
        data, lsbg_coord, starlet_thresh=starlet_thresh, prefix=prefix, index=index,
        pixel_scale=pixel_scale, zp=zp, model_dir=model_dir, figure_dir=figure_dir,
        show_figure=show_figure, tigress=True)
def fitting_wavelet_mockgal(index=0, starlet_thresh=0.8, prefix='MockLSBG', pixel_scale=HSC_pixel_scale,
                            zp=HSC_zeropoint, model_dir='./Models/MockGalModel', output_dir='./Models/',
                            figure_dir='./Figure', show_figure=False):
    """Run scarlet wavelet fitting on a pickled mock galaxy.

    Parameters
    ----------
    index : int
        Sequence number of the mock galaxy; selects ``{prefix}-{index:04d}.pkl``
        inside ``model_dir``.
    starlet_thresh, prefix, pixel_scale, zp, figure_dir, show_figure :
        Forwarded to ``_fitting_wavelet``.
    model_dir : str
        Directory holding the pickled ``MockGal`` inputs.
    output_dir : str
        Directory where ``_fitting_wavelet`` writes its models.

    Returns
    -------
    The scarlet blend object returned by ``_fitting_wavelet``.
    """
    clear_output()
    kz.utils.set_env(project='HSC', name='HSC_LSBG')
    from kuaizi.mock import MockGal

    # Build the pickle path once (was duplicated) and log it before loading,
    # so a failed read still shows which file was attempted.
    model_path = os.path.join(model_dir, f'{prefix}-{index:04d}.pkl')
    print('Loading', model_path)
    mgal = MockGal.read(model_path)

    channels = mgal.channels
    lsbg_coord = SkyCoord(
        ra=mgal.model.info['ra'], dec=mgal.model.info['dec'], unit='deg')

    # Reconstruct data directly from the mock object
    # (removed the no-op `index = index` and unused `channels_list`/`filters`).
    images = mgal.mock.images
    w = mgal.mock.wcs
    weights = 1 / mgal.mock.variances
    psfs = scarlet.ImagePSF(np.array(mgal.mock.psfs))
    data = Data(images=images, weights=weights,
                wcs=w, psfs=psfs, channels=channels)

    blend = _fitting_wavelet(
        data, lsbg_coord, starlet_thresh=starlet_thresh, prefix=prefix, index=index, pixel_scale=pixel_scale,
        zp=zp, model_dir=output_dir, figure_dir=figure_dir, show_figure=show_figure)
    return blend
| 39.942825
| 152
| 0.545934
| 13,103
| 105,489
| 4.214226
| 0.045028
| 0.01782
| 0.016226
| 0.008113
| 0.896358
| 0.883174
| 0.871982
| 0.861171
| 0.850559
| 0.847317
| 0
| 0.024075
| 0.338898
| 105,489
| 2,640
| 153
| 39.957955
| 0.76772
| 0.135881
| 0
| 0.874765
| 0
| 0.008912
| 0.088096
| 0.030823
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004221
| false
| 0
| 0.020638
| 0
| 0.031895
| 0.038931
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bbd71fd8582cff0a1d89fb5a83fc1bf80dc26d79
| 1,007
|
py
|
Python
|
src/datasets/__init__.py
|
agnes-yang/DeepADoTS
|
4a52caf4e49bad8e057649ca05ea9522c77518fb
|
[
"MIT"
] | null | null | null |
src/datasets/__init__.py
|
agnes-yang/DeepADoTS
|
4a52caf4e49bad8e057649ca05ea9522c77518fb
|
[
"MIT"
] | null | null | null |
src/datasets/__init__.py
|
agnes-yang/DeepADoTS
|
4a52caf4e49bad8e057649ca05ea9522c77518fb
|
[
"MIT"
] | null | null | null |
# Fixed: this module contained an unresolved git merge conflict
# (`<<<<<<< HEAD` / `=======` / `>>>>>>> upstream/master`) whose two sides
# were byte-identical; the markers made the file a SyntaxError. Resolved to
# the single shared copy.
from .dataset import Dataset
from .kdd_cup import KDDCup
from .multivariate_anomaly_function import MultivariateAnomalyFunction
from .real_datasets import RealDataset, RealPickledDataset
from .synthetic_data_generator import SyntheticDataGenerator
from .synthetic_dataset import SyntheticDataset

# Public API of the datasets package.
__all__ = [
    'Dataset',
    'SyntheticDataset',
    'RealDataset',
    'RealPickledDataset',
    'KDDCup',
    'SyntheticDataGenerator',
    'MultivariateAnomalyFunction'
]
| 27.972222
| 71
| 0.76862
| 83
| 1,007
| 9.060241
| 0.277108
| 0.069149
| 0.045213
| 0.06383
| 0.976064
| 0.976064
| 0.976064
| 0.976064
| 0.976064
| 0.976064
| 0
| 0
| 0.14995
| 1,007
| 35
| 72
| 28.771429
| 0.878505
| 0
| 0
| 0.848485
| 0
| 0
| 0.215943
| 0.09889
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.363636
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 9
|
bbf8432ec4cb1c805e68fd560036b02c73f22b72
| 139
|
py
|
Python
|
code/chapter-1/exercise1_14.py
|
Kevin-Oudai/python-solutions
|
d67f6b14723b000fec0011c3e8156b805eb288f7
|
[
"MIT"
] | null | null | null |
code/chapter-1/exercise1_14.py
|
Kevin-Oudai/python-solutions
|
d67f6b14723b000fec0011c3e8156b805eb288f7
|
[
"MIT"
] | null | null | null |
code/chapter-1/exercise1_14.py
|
Kevin-Oudai/python-solutions
|
d67f6b14723b000fec0011c3e8156b805eb288f7
|
[
"MIT"
] | null | null | null |
import turtle

# Draw a triangle with three 50-unit sides. The first turn (60 degrees)
# orients the turtle; each later vertex needs a 120-degree exterior turn.
# Command trace is identical to the original unrolled version:
# right(60), forward(50), right(120), forward(50), right(120), forward(50).
for side in range(3):
    turtle.right(60 if side == 0 else 120)
    turtle.forward(50)
turtle.done()
| 13.9
| 18
| 0.776978
| 22
| 139
| 4.909091
| 0.363636
| 0.305556
| 0.416667
| 0.583333
| 0.731481
| 0.731481
| 0.731481
| 0.731481
| 0.731481
| 0.731481
| 0
| 0.108527
| 0.071942
| 139
| 9
| 19
| 15.444444
| 0.728682
| 0
| 0
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.125
| 0
| 0.125
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
bbfa830a348927b577d555d8a1d35b262a61598d
| 942
|
py
|
Python
|
optimus/engines/base/io/save.py
|
liRONCO11/optimus
|
0ca0567267300397c7ba711483c46f94ac265e55
|
[
"Apache-2.0"
] | null | null | null |
optimus/engines/base/io/save.py
|
liRONCO11/optimus
|
0ca0567267300397c7ba711483c46f94ac265e55
|
[
"Apache-2.0"
] | null | null | null |
optimus/engines/base/io/save.py
|
liRONCO11/optimus
|
0ca0567267300397c7ba711483c46f94ac265e55
|
[
"Apache-2.0"
] | null | null | null |
class BaseSave:
    """Abstract save interface listing every supported output format.

    Each method is a placeholder that raises ``NotImplementedError``;
    concrete engine backends are expected to override the formats they
    actually support. All methods share the signature
    ``(self, path, *args, **kwargs)``.
    """

    def file(self, path, *args, **kwargs):
        """Save to a generic file; not implemented in the base class."""
        raise NotImplementedError("Not implemented yet")

    def csv(self, path, *args, **kwargs):
        """Save as CSV; not implemented in the base class."""
        raise NotImplementedError("Not implemented yet")

    def xml(self, path, *args, **kwargs):
        """Save as XML; not implemented in the base class."""
        raise NotImplementedError("Not implemented yet")

    def json(self, path, *args, **kwargs):
        """Save as JSON; not implemented in the base class."""
        raise NotImplementedError("Not implemented yet")

    def excel(self, path, *args, **kwargs):
        """Save as an Excel workbook; not implemented in the base class."""
        raise NotImplementedError("Not implemented yet")

    def avro(self, path, *args, **kwargs):
        """Save as Avro; not implemented in the base class."""
        raise NotImplementedError("Not implemented yet")

    def parquet(self, path, *args, **kwargs):
        """Save as Parquet; not implemented in the base class."""
        raise NotImplementedError("Not implemented yet")

    def orc(self, path, *args, **kwargs):
        """Save as ORC; not implemented in the base class."""
        raise NotImplementedError("Not implemented yet")

    def hdf5(self, path, *args, **kwargs):
        """Save as HDF5; not implemented in the base class."""
        raise NotImplementedError("Not implemented yet")
| 32.482759
| 56
| 0.645435
| 101
| 942
| 6.019802
| 0.207921
| 0.118421
| 0.177632
| 0.266447
| 0.912829
| 0.912829
| 0.912829
| 0.912829
| 0.912829
| 0.815789
| 0
| 0.001377
| 0.229299
| 942
| 28
| 57
| 33.642857
| 0.836088
| 0
| 0
| 0.473684
| 0
| 0
| 0.181529
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.473684
| false
| 0
| 0
| 0
| 0.526316
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
a56eab4367070594e81c1811b76ddfb4ded2f278
| 118
|
py
|
Python
|
Warmup-1/front_back.py
|
VivekM27/Coding-Bat-Python-Solutions
|
14d5c6ccaa2129e56a5898374dec60740fe6761b
|
[
"Apache-2.0"
] | null | null | null |
Warmup-1/front_back.py
|
VivekM27/Coding-Bat-Python-Solutions
|
14d5c6ccaa2129e56a5898374dec60740fe6761b
|
[
"Apache-2.0"
] | null | null | null |
Warmup-1/front_back.py
|
VivekM27/Coding-Bat-Python-Solutions
|
14d5c6ccaa2129e56a5898374dec60740fe6761b
|
[
"Apache-2.0"
] | null | null | null |
# FRONT_BACK
# NOTE(review): the parameter shadows the builtin `str`; kept as-is so
# keyword callers are not broken.
def front_back(str):
    """Return *str* with its first and last characters swapped.

    Strings of length 0 or 1 come back unchanged.
    """
    if len(str) <= 1:
        return str
    return str[-1] + str[1:-1] + str[0]
| 39.333333
| 82
| 0.618644
| 24
| 118
| 2.958333
| 0.416667
| 0.225352
| 0.295775
| 0.28169
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05
| 0.152542
| 118
| 3
| 82
| 39.333333
| 0.66
| 0.084746
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
a59d06e07eab023d56ce56ccb8eebd856ed9cf82
| 116
|
py
|
Python
|
tests/input/unused-import/smoke.py
|
DKorytkin/pylint-pytest
|
097b7767e5f33ad512d421bea9ebb74a251f47bd
|
[
"MIT"
] | 37
|
2020-06-04T16:34:39.000Z
|
2022-02-27T13:00:22.000Z
|
tests/input/unused-import/smoke.py
|
DKorytkin/pylint-pytest
|
097b7767e5f33ad512d421bea9ebb74a251f47bd
|
[
"MIT"
] | 26
|
2020-07-10T15:53:19.000Z
|
2022-03-28T23:56:03.000Z
|
tests/input/unused-import/smoke.py
|
DKorytkin/pylint-pytest
|
097b7767e5f33ad512d421bea9ebb74a251f47bd
|
[
"MIT"
] | 6
|
2020-06-29T17:45:38.000Z
|
2022-02-19T01:09:57.000Z
|
from conftest import conftest_fixture_attr
def test_conftest_fixture_attr(conftest_fixture_attr):
assert True
| 19.333333
| 54
| 0.853448
| 16
| 116
| 5.75
| 0.5625
| 0.48913
| 0.619565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12069
| 116
| 5
| 55
| 23.2
| 0.901961
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3c069f9610fbff661103c7e4de11c54530b94ec1
| 37,785
|
py
|
Python
|
venv/lib/python3.8/site-packages/azure/mgmt/security/operations/_alerts_operations.py
|
amcclead7336/Enterprise_Data_Science_Final
|
ccdc0aa08d4726bf82d71c11a1cc0c63eb301a28
|
[
"Unlicense",
"MIT"
] | null | null | null |
venv/lib/python3.8/site-packages/azure/mgmt/security/operations/_alerts_operations.py
|
amcclead7336/Enterprise_Data_Science_Final
|
ccdc0aa08d4726bf82d71c11a1cc0c63eb301a28
|
[
"Unlicense",
"MIT"
] | null | null | null |
venv/lib/python3.8/site-packages/azure/mgmt/security/operations/_alerts_operations.py
|
amcclead7336/Enterprise_Data_Science_Final
|
ccdc0aa08d4726bf82d71c11a1cc0c63eb301a28
|
[
"Unlicense",
"MIT"
] | 2
|
2021-05-23T16:46:31.000Z
|
2021-05-26T23:51:09.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from .. import models
class AlertsOperations(object):
"""AlertsOperations operations.
You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar api_version: API version for the operation. Constant value: "2020-01-01".
"""
models = models
def __init__(self, client, config, serializer, deserializer):
    # Service-request client plus the msrest (de)serializers injected by
    # the generated service client; `config` carries subscription and
    # locale settings read by every operation below.
    self._client = client
    self._serialize = serializer
    self._deserialize = deserializer
    # Constant API version for this operations group.
    self.api_version = "2020-01-01"

    self.config = config
def list(
        self, custom_headers=None, raw=False, **operation_config):
    """List all the alerts that are associated with the subscription.

    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: An iterator like instance of Alert
    :rtype:
     ~azure.mgmt.security.models.AlertPaged[~azure.mgmt.security.models.Alert]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    def prepare_request(next_link=None):
        # First page: build URL + query from metadata; subsequent pages
        # reuse the service-provided next_link verbatim (query embedded).
        if not next_link:
            # Construct URL
            url = self.list.metadata['url']
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', pattern=r'^[0-9A-Fa-f]{8}-([0-9A-Fa-f]{4}-){3}[0-9A-Fa-f]{12}$')
            }
            url = self._client.format_url(url, **path_format_arguments)

            # Construct parameters
            query_parameters = {}
            query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        else:
            url = next_link
            query_parameters = {}

        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters, header_parameters)

        return request

    def internal_paging(next_link=None):
        # Fetch one page; anything other than HTTP 200 becomes a CloudError.
        request = prepare_request(next_link)

        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        return response

    # Deserialize response: AlertPaged lazily drives internal_paging;
    # when `raw` it also collects per-page headers into header_dict.
    header_dict = None
    if raw:
        header_dict = {}
    deserialized = models.AlertPaged(internal_paging, self._deserialize.dependencies, header_dict)

    return deserialized
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Security/alerts'}
def list_by_resource_group(
        self, resource_group_name, custom_headers=None, raw=False, **operation_config):
    """List all the alerts that are associated with the resource group.

    :param resource_group_name: The name of the resource group within the
     user's subscription. The name is case insensitive.
    :type resource_group_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: An iterator like instance of Alert
    :rtype:
     ~azure.mgmt.security.models.AlertPaged[~azure.mgmt.security.models.Alert]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    def prepare_request(next_link=None):
        # First page builds the URL from metadata; later pages reuse next_link.
        if not next_link:
            # Construct URL
            url = self.list_by_resource_group.metadata['url']
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', pattern=r'^[0-9A-Fa-f]{8}-([0-9A-Fa-f]{4}-){3}[0-9A-Fa-f]{12}$'),
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$')
            }
            url = self._client.format_url(url, **path_format_arguments)

            # Construct parameters
            query_parameters = {}
            query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        else:
            url = next_link
            query_parameters = {}

        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters, header_parameters)

        return request

    def internal_paging(next_link=None):
        # One page per call; non-200 responses are raised as CloudError.
        request = prepare_request(next_link)

        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        return response

    # Deserialize response
    header_dict = None
    if raw:
        header_dict = {}
    deserialized = models.AlertPaged(internal_paging, self._deserialize.dependencies, header_dict)

    return deserialized
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Security/alerts'}
def list_subscription_level_alerts_by_region(
        self, custom_headers=None, raw=False, **operation_config):
    """List all the alerts that are associated with the subscription that are
    stored in a specific location.

    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: An iterator like instance of Alert
    :rtype:
     ~azure.mgmt.security.models.AlertPaged[~azure.mgmt.security.models.Alert]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    def prepare_request(next_link=None):
        # Location (ascLocation) comes from the shared client config, not
        # a parameter; first page builds the URL, later pages use next_link.
        if not next_link:
            # Construct URL
            url = self.list_subscription_level_alerts_by_region.metadata['url']
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', pattern=r'^[0-9A-Fa-f]{8}-([0-9A-Fa-f]{4}-){3}[0-9A-Fa-f]{12}$'),
                'ascLocation': self._serialize.url("self.config.asc_location", self.config.asc_location, 'str')
            }
            url = self._client.format_url(url, **path_format_arguments)

            # Construct parameters
            query_parameters = {}
            query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        else:
            url = next_link
            query_parameters = {}

        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters, header_parameters)

        return request

    def internal_paging(next_link=None):
        request = prepare_request(next_link)

        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        return response

    # Deserialize response
    header_dict = None
    if raw:
        header_dict = {}
    deserialized = models.AlertPaged(internal_paging, self._deserialize.dependencies, header_dict)

    return deserialized
list_subscription_level_alerts_by_region.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Security/locations/{ascLocation}/alerts'}
def list_resource_group_level_alerts_by_region(
        self, resource_group_name, custom_headers=None, raw=False, **operation_config):
    """List all the alerts that are associated with the resource group that
    are stored in a specific location.

    :param resource_group_name: The name of the resource group within the
     user's subscription. The name is case insensitive.
    :type resource_group_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: An iterator like instance of Alert
    :rtype:
     ~azure.mgmt.security.models.AlertPaged[~azure.mgmt.security.models.Alert]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    def prepare_request(next_link=None):
        # URL carries subscription, config-provided ascLocation, and the
        # caller's resource group; later pages reuse next_link verbatim.
        if not next_link:
            # Construct URL
            url = self.list_resource_group_level_alerts_by_region.metadata['url']
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', pattern=r'^[0-9A-Fa-f]{8}-([0-9A-Fa-f]{4}-){3}[0-9A-Fa-f]{12}$'),
                'ascLocation': self._serialize.url("self.config.asc_location", self.config.asc_location, 'str'),
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$')
            }
            url = self._client.format_url(url, **path_format_arguments)

            # Construct parameters
            query_parameters = {}
            query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        else:
            url = next_link
            query_parameters = {}

        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters, header_parameters)

        return request

    def internal_paging(next_link=None):
        request = prepare_request(next_link)

        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        return response

    # Deserialize response
    header_dict = None
    if raw:
        header_dict = {}
    deserialized = models.AlertPaged(internal_paging, self._deserialize.dependencies, header_dict)

    return deserialized
list_resource_group_level_alerts_by_region.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Security/locations/{ascLocation}/alerts'}
def get_subscription_level_alert(
        self, alert_name, custom_headers=None, raw=False, **operation_config):
    """Get an alert that is associated with a subscription.

    :param alert_name: Name of the alert object
    :type alert_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: Alert or ClientRawResponse if raw=true
    :rtype: ~azure.mgmt.security.models.Alert or
     ~msrest.pipeline.ClientRawResponse
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL (single GET, no paging; location comes from config)
    url = self.get_subscription_level_alert.metadata['url']
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', pattern=r'^[0-9A-Fa-f]{8}-([0-9A-Fa-f]{4}-){3}[0-9A-Fa-f]{12}$'),
        'ascLocation': self._serialize.url("self.config.asc_location", self.config.asc_location, 'str'),
        'alertName': self._serialize.url("alert_name", alert_name, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Accept'] = 'application/json'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct and send request
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('Alert', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
get_subscription_level_alert.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Security/locations/{ascLocation}/alerts/{alertName}'}
def get_resource_group_level_alerts(
        self, alert_name, resource_group_name, custom_headers=None, raw=False, **operation_config):
    """Get an alert that is associated a resource group or a resource in a
    resource group.

    :param alert_name: Name of the alert object
    :type alert_name: str
    :param resource_group_name: The name of the resource group within the
     user's subscription. The name is case insensitive.
    :type resource_group_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: Alert or ClientRawResponse if raw=true
    :rtype: ~azure.mgmt.security.models.Alert or
     ~msrest.pipeline.ClientRawResponse
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL (single GET, no paging)
    url = self.get_resource_group_level_alerts.metadata['url']
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', pattern=r'^[0-9A-Fa-f]{8}-([0-9A-Fa-f]{4}-){3}[0-9A-Fa-f]{12}$'),
        'ascLocation': self._serialize.url("self.config.asc_location", self.config.asc_location, 'str'),
        'alertName': self._serialize.url("alert_name", alert_name, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Accept'] = 'application/json'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct and send request
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [200]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('Alert', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
get_resource_group_level_alerts.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Security/locations/{ascLocation}/alerts/{alertName}'}
def update_subscription_level_alert_state_to_dismiss(
        self, alert_name, custom_headers=None, raw=False, **operation_config):
    """Update the alert's state.

    Dismisses a subscription-level alert via a POST to the `/dismiss`
    action endpoint; success is an empty 204 response.

    :param alert_name: Name of the alert object
    :type alert_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    url = self.update_subscription_level_alert_state_to_dismiss.metadata['url']
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', pattern=r'^[0-9A-Fa-f]{8}-([0-9A-Fa-f]{4}-){3}[0-9A-Fa-f]{12}$'),
        'ascLocation': self._serialize.url("self.config.asc_location", self.config.asc_location, 'str'),
        'alertName': self._serialize.url("alert_name", alert_name, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers (no Accept header: the action returns no body)
    header_parameters = {}
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct and send request
    request = self._client.post(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)

    if response.status_code not in [204]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    if raw:
        client_raw_response = ClientRawResponse(None, response)
        return client_raw_response
update_subscription_level_alert_state_to_dismiss.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Security/locations/{ascLocation}/alerts/{alertName}/dismiss'}
def update_subscription_level_state_to_resolve(
        self, alert_name, custom_headers=None, raw=False, **operation_config):
    """Update the alert's state.

    Resolves a subscription-level alert via a POST to the service; the
    service is expected to answer 204 (No Content) on success.

    :param alert_name: Name of the alert object
    :type alert_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Expand the operation's URL template with the serialized path values.
    path_args = {
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', pattern=r'^[0-9A-Fa-f]{8}-([0-9A-Fa-f]{4}-){3}[0-9A-Fa-f]{12}$'),
        'ascLocation': self._serialize.url("self.config.asc_location", self.config.asc_location, 'str'),
        'alertName': self._serialize.url("alert_name", alert_name, 'str')
    }
    url = self._client.format_url(
        self.update_subscription_level_state_to_resolve.metadata['url'],
        **path_args)

    # Only the API version travels in the query string.
    query = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str')
    }

    # Assemble request headers: optional client request id, caller-supplied
    # extras, then the configured accept-language.
    headers = {}
    if self.config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        headers.update(custom_headers)
    if self.config.accept_language is not None:
        headers['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Issue the POST and check for the single expected status code.
    request = self._client.post(url, query, headers)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code != 204:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        return ClientRawResponse(None, response)
update_subscription_level_state_to_resolve.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Security/locations/{ascLocation}/alerts/{alertName}/resolve'}
def update_subscription_level_alert_state_to_reactivate(
        self, alert_name, custom_headers=None, raw=False, **operation_config):
    """Update the alert's state.

    Reactivates a subscription-level alert via a POST to the service; the
    service is expected to answer 204 (No Content) on success.

    :param alert_name: Name of the alert object
    :type alert_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Expand the operation's URL template with the serialized path values.
    path_args = {
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', pattern=r'^[0-9A-Fa-f]{8}-([0-9A-Fa-f]{4}-){3}[0-9A-Fa-f]{12}$'),
        'ascLocation': self._serialize.url("self.config.asc_location", self.config.asc_location, 'str'),
        'alertName': self._serialize.url("alert_name", alert_name, 'str')
    }
    url = self._client.format_url(
        self.update_subscription_level_alert_state_to_reactivate.metadata['url'],
        **path_args)

    # Only the API version travels in the query string.
    query = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str')
    }

    # Assemble request headers: optional client request id, caller-supplied
    # extras, then the configured accept-language.
    headers = {}
    if self.config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        headers.update(custom_headers)
    if self.config.accept_language is not None:
        headers['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Issue the POST and check for the single expected status code.
    request = self._client.post(url, query, headers)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code != 204:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        return ClientRawResponse(None, response)
update_subscription_level_alert_state_to_reactivate.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Security/locations/{ascLocation}/alerts/{alertName}/activate'}
def update_resource_group_level_state_to_resolve(
        self, alert_name, resource_group_name, custom_headers=None, raw=False, **operation_config):
    """Update the alert's state.

    Resolves a resource-group-scoped alert via a POST to the service; the
    service is expected to answer 204 (No Content) on success.

    :param alert_name: Name of the alert object
    :type alert_name: str
    :param resource_group_name: The name of the resource group within the
     user's subscription. The name is case insensitive.
    :type resource_group_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Expand the operation's URL template with the serialized path values.
    path_args = {
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', pattern=r'^[0-9A-Fa-f]{8}-([0-9A-Fa-f]{4}-){3}[0-9A-Fa-f]{12}$'),
        'ascLocation': self._serialize.url("self.config.asc_location", self.config.asc_location, 'str'),
        'alertName': self._serialize.url("alert_name", alert_name, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$')
    }
    url = self._client.format_url(
        self.update_resource_group_level_state_to_resolve.metadata['url'],
        **path_args)

    # Only the API version travels in the query string.
    query = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str')
    }

    # Assemble request headers: optional client request id, caller-supplied
    # extras, then the configured accept-language.
    headers = {}
    if self.config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        headers.update(custom_headers)
    if self.config.accept_language is not None:
        headers['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Issue the POST and check for the single expected status code.
    request = self._client.post(url, query, headers)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code != 204:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        return ClientRawResponse(None, response)
update_resource_group_level_state_to_resolve.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Security/locations/{ascLocation}/alerts/{alertName}/resolve'}
def update_resource_group_level_alert_state_to_dismiss(
        self, alert_name, resource_group_name, custom_headers=None, raw=False, **operation_config):
    """Update the alert's state.

    Dismisses a resource-group-scoped alert via a POST to the service; the
    service is expected to answer 204 (No Content) on success.

    :param alert_name: Name of the alert object
    :type alert_name: str
    :param resource_group_name: The name of the resource group within the
     user's subscription. The name is case insensitive.
    :type resource_group_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Expand the operation's URL template with the serialized path values.
    path_args = {
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', pattern=r'^[0-9A-Fa-f]{8}-([0-9A-Fa-f]{4}-){3}[0-9A-Fa-f]{12}$'),
        'ascLocation': self._serialize.url("self.config.asc_location", self.config.asc_location, 'str'),
        'alertName': self._serialize.url("alert_name", alert_name, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$')
    }
    url = self._client.format_url(
        self.update_resource_group_level_alert_state_to_dismiss.metadata['url'],
        **path_args)

    # Only the API version travels in the query string.
    query = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str')
    }

    # Assemble request headers: optional client request id, caller-supplied
    # extras, then the configured accept-language.
    headers = {}
    if self.config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        headers.update(custom_headers)
    if self.config.accept_language is not None:
        headers['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Issue the POST and check for the single expected status code.
    request = self._client.post(url, query, headers)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code != 204:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        return ClientRawResponse(None, response)
update_resource_group_level_alert_state_to_dismiss.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Security/locations/{ascLocation}/alerts/{alertName}/dismiss'}
def update_resource_group_level_alert_state_to_reactivate(
        self, alert_name, resource_group_name, custom_headers=None, raw=False, **operation_config):
    """Update the alert's state.

    Reactivates a resource-group-scoped alert via a POST to the service;
    the service is expected to answer 204 (No Content) on success.

    :param alert_name: Name of the alert object
    :type alert_name: str
    :param resource_group_name: The name of the resource group within the
     user's subscription. The name is case insensitive.
    :type resource_group_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Expand the operation's URL template with the serialized path values.
    path_args = {
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', pattern=r'^[0-9A-Fa-f]{8}-([0-9A-Fa-f]{4}-){3}[0-9A-Fa-f]{12}$'),
        'ascLocation': self._serialize.url("self.config.asc_location", self.config.asc_location, 'str'),
        'alertName': self._serialize.url("alert_name", alert_name, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$')
    }
    url = self._client.format_url(
        self.update_resource_group_level_alert_state_to_reactivate.metadata['url'],
        **path_args)

    # Only the API version travels in the query string.
    query = {
        'api-version': self._serialize.query("self.api_version", self.api_version, 'str')
    }

    # Assemble request headers: optional client request id, caller-supplied
    # extras, then the configured accept-language.
    headers = {}
    if self.config.generate_client_request_id:
        headers['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        headers.update(custom_headers)
    if self.config.accept_language is not None:
        headers['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Issue the POST and check for the single expected status code.
    request = self._client.post(url, query, headers)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code != 204:
        error = CloudError(response)
        error.request_id = response.headers.get('x-ms-request-id')
        raise error

    if raw:
        return ClientRawResponse(None, response)
update_resource_group_level_alert_state_to_reactivate.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Security/locations/{ascLocation}/alerts/{alertName}/activate'}
| 49.848285
| 227
| 0.660685
| 4,310
| 37,785
| 5.58051
| 0.048028
| 0.038666
| 0.023948
| 0.008981
| 0.958423
| 0.957176
| 0.951231
| 0.941211
| 0.929943
| 0.922834
| 0
| 0.007607
| 0.231071
| 37,785
| 757
| 228
| 49.914135
| 0.820231
| 0.24343
| 0
| 0.847291
| 0
| 0.039409
| 0.183749
| 0.116718
| 0
| 0
| 0
| 0
| 0
| 1
| 0.051724
| false
| 0
| 0.009852
| 0
| 0.12069
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3c0c54bee76e5ed8f9221f47356913657ebf8287
| 30,760
|
py
|
Python
|
assets/py/fonts/robotol.py
|
hkdickyko/hkdickyko.github.io
|
bbee50cc1002071c100e9109952fa58966f78a6c
|
[
"MIT"
] | 1
|
2021-01-07T14:48:03.000Z
|
2021-01-07T14:48:03.000Z
|
assets/py/fonts/robotol.py
|
hkdickyko/hkdickyko.github.io
|
bbee50cc1002071c100e9109952fa58966f78a6c
|
[
"MIT"
] | null | null | null |
assets/py/fonts/robotol.py
|
hkdickyko/hkdickyko.github.io
|
bbee50cc1002071c100e9109952fa58966f78a6c
|
[
"MIT"
] | null | null | null |
# Code generated by font_to_py.py.
# Font: Robotol.ttf
# Cmd: ./font_to_py.py -x Robotol.ttf 30 myfont.py
version = '0.33'  # presumably the font_to_py output-format version (see generator header above) — TODO confirm
def height():
    """Return the rendered character height of this font, in pixels."""
    glyph_height = 31
    return glyph_height
def baseline():
    """Return the baseline offset of the font, in pixels from the top."""
    baseline_px = 24
    return baseline_px
def max_width():
    """Return the width of the widest glyph in this font, in pixels."""
    widest = 26
    return widest
def hmap():
    """Return True, indicating this font's bitmaps are horizontally mapped."""
    horizontally_mapped = True
    return horizontally_mapped
def reverse():
    """Return False, indicating bit order within bytes is not reversed."""
    bits_reversed = False
    return bits_reversed
def monospaced():
    """Return False, indicating this is a proportionally spaced font."""
    fixed_pitch = False
    return fixed_pitch
def min_ch():
    """Return the ordinal of the lowest character this font provides (space)."""
    lowest_codepoint = 32
    return lowest_codepoint
def max_ch():
    """Return the ordinal of the highest character this font provides ('~')."""
    highest_codepoint = 126
    return highest_codepoint
_font =\
b'\x0d\x00\x00\x00\x00\x00\x00\x00\x03\xe0\x0f\xf0\x1c\x38\x18\x18'\
b'\x10\x18\x20\x18\x00\x18\x00\x30\x00\x60\x00\xe0\x01\x80\x03\x00'\
b'\x02\x00\x06\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00'\
b'\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x07\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x06\x00\x00\x00\x00\x0c\x0c\x0c\x0c\x08\x18\x18\x18\x18\x10'\
b'\x10\x30\x30\x30\x30\x00\x00\x00\x00\x60\x60\x00\x00\x00\x00\x00'\
b'\x00\x00\x09\x00\x00\x00\x00\x00\x09\x80\x09\x00\x09\x00\x19\x00'\
b'\x1b\x00\x13\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x12\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x61\x80'\
b'\x00\x43\x00\x00\xc3\x00\x00\x82\x00\x00\x86\x00\x1f\xff\xc0\x1f'\
b'\xff\x80\x01\x0c\x00\x03\x08\x00\x02\x08\x00\x02\x18\x00\x06\x10'\
b'\x00\x04\x30\x00\x7f\xfe\x00\x7f\xfe\x00\x08\x20\x00\x08\x60\x00'\
b'\x18\x40\x00\x10\x40\x00\x10\xc0\x00\x30\x80\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x10\x00\x00\x20\x00\x20\x00\x20\x01\xf0\x03\xfc\x06\x0c\x0c'\
b'\x06\x08\x06\x08\x06\x08\x00\x0c\x00\x0e\x00\x07\x80\x01\xe0\x00'\
b'\x70\x00\x18\x00\x08\x00\x0c\x60\x0c\x60\x0c\x30\x18\x38\x78\x1f'\
b'\xf0\x0f\xc0\x03\x00\x02\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x15\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\x80\x00\x0f'\
b'\xc0\x00\x1c\x60\x80\x18\x61\x80\x10\x61\x00\x10\x62\x00\x10\x66'\
b'\x00\x18\xcc\x00\x0f\xd8\x00\x07\x10\x00\x00\x30\x00\x00\x67\x80'\
b'\x00\xcf\xc0\x00\x8c\x60\x01\x18\x60\x03\x10\x60\x06\x10\x60\x04'\
b'\x10\x60\x08\x18\xc0\x00\x0f\xc0\x00\x07\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x11\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xe0\x00\x03\xf8'\
b'\x00\x06\x18\x00\x06\x08\x00\x0c\x08\x00\x0c\x18\x00\x0c\x30\x00'\
b'\x06\x60\x00\x07\xc0\x00\x03\x00\x00\x0f\x00\x00\x19\x80\x00\x30'\
b'\xc3\x00\x60\x42\x00\x60\x66\x00\x60\x36\x00\x60\x1c\x00\x60\x18'\
b'\x00\x70\x7c\x00\x3f\xe4\x00\x0f\x86\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05'\
b'\x00\x00\x00\x08\x08\x18\x18\x10\x10\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0b\x00\x00\x00\x00\x20\x00\xc0\x01\x80\x03\x00\x02\x00\x06\x00'\
b'\x04\x00\x0c\x00\x08\x00\x18\x00\x18\x00\x10\x00\x30\x00\x30\x00'\
b'\x30\x00\x30\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00\x20\x00'\
b'\x20\x00\x30\x00\x30\x00\x10\x00\x18\x00\x08\x00\x0c\x00\x04\x00'\
b'\x0b\x00\x00\x00\x04\x00\x0c\x00\x02\x00\x03\x00\x01\x00\x01\x00'\
b'\x01\x80\x01\x80\x01\x80\x00\x80\x00\x80\x01\x80\x01\x80\x01\x80'\
b'\x01\x80\x01\x80\x01\x80\x01\x00\x03\x00\x03\x00\x03\x00\x06\x00'\
b'\x06\x00\x04\x00\x0c\x00\x18\x00\x10\x00\x30\x00\x60\x00\x80\x00'\
b'\x0d\x00\x00\x00\x00\x00\x00\x00\x01\x80\x01\x80\x01\x00\x01\x08'\
b'\x39\x38\x1f\xe0\x03\x00\x07\x80\x0d\x80\x18\xc0\x30\xc0\x00\x80'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\x80\x00\x80\x01\x80\x7f\xfe'\
b'\x7f\xfe\x01\x80\x01\x00\x01\x00\x03\x00\x03\x00\x03\x00\x03\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x07\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x30\x30\x20\x60\x60\xc0\x80\x00'\
b'\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x7f\x7e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x07\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x60\x60\x00\x00\x00\x00'\
b'\x00\x00\x00\x0d\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x18\x00'\
b'\x30\x00\x30\x00\x60\x00\x60\x00\xc0\x00\x80\x01\x80\x01\x00\x03'\
b'\x00\x02\x00\x06\x00\x04\x00\x0c\x00\x08\x00\x18\x00\x30\x00\x30'\
b'\x00\x60\x00\x60\x00\xc0\x00\x80\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x01\xf0\x03\xf8\x06'\
b'\x0c\x0c\x04\x08\x06\x18\x06\x18\x06\x10\x06\x30\x06\x30\x06\x30'\
b'\x04\x30\x04\x20\x0c\x20\x0c\x20\x0c\x20\x08\x30\x18\x30\x30\x38'\
b'\x70\x1f\xe0\x0f\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x00\x30\x00\xe0\x07'\
b'\xe0\x0e\x60\x08\x60\x00\x60\x00\x60\x00\x40\x00\x40\x00\xc0\x00'\
b'\xc0\x00\xc0\x00\xc0\x00\x80\x00\x80\x01\x80\x01\x80\x01\x80\x01'\
b'\x80\x01\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x01\xf0\x07\xf8\x0e'\
b'\x1c\x1c\x0c\x18\x06\x10\x06\x00\x04\x00\x0c\x00\x0c\x00\x18\x00'\
b'\x30\x00\x60\x00\xc0\x01\x80\x03\x00\x06\x00\x0c\x00\x18\x00\x30'\
b'\x00\x7f\xf8\x7f\xf8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x01\xf0\x07\xfc\x0e'\
b'\x0c\x0c\x06\x18\x06\x18\x06\x00\x06\x00\x0c\x00\x18\x03\xf0\x03'\
b'\xf0\x00\x38\x00\x18\x00\x0c\x00\x0c\x60\x0c\x60\x18\x30\x18\x38'\
b'\x70\x1f\xe0\x0f\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x1c\x00'\
b'\x3c\x00\x6c\x00\x48\x00\xd8\x01\x98\x03\x18\x06\x18\x04\x10\x0c'\
b'\x10\x18\x30\x30\x30\x60\x30\x7f\xfe\xff\xfe\x00\x20\x00\x60\x00'\
b'\x60\x00\x60\x00\x60\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x03\xff\x03\xff\x06'\
b'\x00\x06\x00\x04\x00\x04\x00\x0c\x00\x0f\xe0\x0f\xf8\x0c\x18\x10'\
b'\x0c\x00\x0c\x00\x0c\x00\x0c\x00\x0c\x30\x0c\x30\x0c\x30\x18\x18'\
b'\x38\x0f\xf0\x07\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x00\x3c\x00\xfc\x03'\
b'\xc0\x07\x00\x06\x00\x0c\x00\x18\x00\x19\xe0\x17\xf8\x3c\x18\x38'\
b'\x0c\x30\x0c\x30\x0c\x20\x0c\x20\x0c\x30\x0c\x30\x08\x30\x18\x18'\
b'\x70\x0f\xe0\x07\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x1f\xff\x1f\xff\x00'\
b'\x02\x00\x06\x00\x04\x00\x0c\x00\x18\x00\x10\x00\x30\x00\x60\x00'\
b'\x60\x00\xc0\x00\x80\x01\x80\x03\x00\x03\x00\x06\x00\x04\x00\x0c'\
b'\x00\x18\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x01\xf0\x07\xfc\x0e'\
b'\x0e\x0c\x06\x08\x06\x18\x06\x18\x06\x08\x0c\x0e\x1c\x07\xf0\x07'\
b'\xf0\x1c\x38\x30\x08\x20\x0c\x60\x0c\x60\x0c\x60\x0c\x60\x18\x38'\
b'\x78\x1f\xf0\x0f\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x01\xf0\x07\xf8\x0e'\
b'\x1c\x0c\x0c\x18\x06\x18\x06\x10\x06\x30\x06\x30\x06\x30\x04\x10'\
b'\x0c\x18\x14\x1c\x3c\x0f\xec\x07\x98\x00\x18\x00\x30\x00\x60\x01'\
b'\xc0\x1f\x80\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x18\x18'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x60\x60\x00\x00\x00'\
b'\x00\x00\x00\x00\x07\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c'\
b'\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x30\x30\x20\x60'\
b'\x60\xc0\x80\x00\x00\x0f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x18\x00\xf0\x03\xc0\x0f'\
b'\x00\x38\x00\x60\x00\x38\x00\x1e\x00\x07\x80\x01\xe0\x00\x70\x00'\
b'\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1f\xfc\x1f'\
b'\xfc\x00\x00\x00\x00\x00\x00\x00\x00\x3f\xf8\x3f\xf8\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x0f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x10\x00\x1c\x00\x0f\x00\x03\x80\x00'\
b'\xe0\x00\x38\x00\x0c\x00\x78\x01\xe0\x07\x00\x3c\x00\x70\x00\x40'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x0d\x00\x00\x00\x00\x00\x00\x00\x03\xe0\x0f'\
b'\xf0\x1c\x38\x18\x18\x10\x18\x20\x18\x00\x18\x00\x30\x00\x60\x00'\
b'\xe0\x01\x80\x03\x00\x02\x00\x06\x00\x06\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x0c\x00\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x1a\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x0f\xe0\x00\x00\x7f\xf8\x00\x00'\
b'\xf0\x1c\x00\x01\xc0\x06\x00\x03\x00\x02\x00\x06\x00\x03\x00\x0c'\
b'\x0f\x81\x00\x18\x1f\xc1\x00\x18\x30\xc1\x00\x30\x60\xc1\x00\x30'\
b'\x40\x81\x00\x20\xc0\x81\x00\x20\x80\x81\x00\x61\x81\x83\x00\x61'\
b'\x81\x02\x00\x61\x81\x02\x00\x61\x83\x06\x00\x61\x85\x0c\x00\x60'\
b'\xfd\xf8\x00\x60\x70\xf0\x00\x30\x00\x00\x00\x30\x00\x00\x00\x18'\
b'\x00\x00\x00\x0e\x06\x00\x00\x07\xfe\x00\x00\x01\xf8\x00\x00\x00'\
b'\x00\x00\x00\x13\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x18'\
b'\x00\x00\x38\x00\x00\x38\x00\x00\x6c\x00\x00\x6c\x00\x00\xcc\x00'\
b'\x00\xcc\x00\x01\x84\x00\x01\x84\x00\x03\x06\x00\x03\x06\x00\x06'\
b'\x06\x00\x06\x06\x00\x0c\x02\x00\x1f\xff\x00\x1f\xff\x00\x38\x03'\
b'\x00\x30\x03\x00\x60\x01\x00\x60\x01\x80\xc0\x01\x80\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x11\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\xf8\x00'\
b'\x0f\xfe\x00\x0c\x06\x00\x0c\x03\x00\x08\x03\x00\x18\x03\x00\x18'\
b'\x03\x00\x18\x06\x00\x18\x0c\x00\x1f\xf8\x00\x1f\xf8\x00\x30\x0c'\
b'\x00\x30\x06\x00\x30\x06\x00\x30\x06\x00\x20\x06\x00\x20\x06\x00'\
b'\x60\x0c\x00\x60\x3c\x00\x7f\xf8\x00\x7f\xe0\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x12\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfc\x00\x03'\
b'\xfe\x00\x07\x07\x00\x0e\x01\x80\x0c\x01\x80\x18\x00\x80\x18\x00'\
b'\x80\x30\x00\x00\x30\x00\x00\x30\x00\x00\x30\x00\x00\x20\x00\x00'\
b'\x20\x00\x00\x20\x00\x00\x20\x03\x00\x30\x03\x00\x30\x06\x00\x38'\
b'\x0e\x00\x1c\x1c\x00\x0f\xf8\x00\x07\xe0\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x13\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xf8\x00\x07\xfe'\
b'\x00\x0c\x07\x00\x0c\x03\x00\x0c\x01\x80\x0c\x01\x80\x0c\x00\x80'\
b'\x08\x00\x80\x18\x00\x80\x18\x00\x80\x18\x01\x80\x18\x01\x80\x10'\
b'\x01\x80\x10\x01\x80\x30\x03\x00\x30\x03\x00\x30\x06\x00\x30\x0c'\
b'\x00\x20\x38\x00\x7f\xf0\x00\x7f\xc0\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xff\x80\x0f\xff\x80'\
b'\x0c\x00\x00\x0c\x00\x00\x0c\x00\x00\x08\x00\x00\x18\x00\x00\x18'\
b'\x00\x00\x18\x00\x00\x1f\xfc\x00\x1f\xfc\x00\x10\x00\x00\x30\x00'\
b'\x00\x30\x00\x00\x30\x00\x00\x30\x00\x00\x20\x00\x00\x20\x00\x00'\
b'\x60\x00\x00\x7f\xfc\x00\x7f\xfc\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xff\x80\x0f\xff\x80\x0c'\
b'\x00\x00\x0c\x00\x00\x0c\x00\x00\x08\x00\x00\x18\x00\x00\x18\x00'\
b'\x00\x18\x00\x00\x1f\xfc\x00\x1f\xfc\x00\x10\x00\x00\x30\x00\x00'\
b'\x30\x00\x00\x30\x00\x00\x30\x00\x00\x30\x00\x00\x20\x00\x00\x60'\
b'\x00\x00\x60\x00\x00\x60\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x13\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfc\x00\x01\xff\x00\x07\x83'\
b'\x80\x06\x01\x80\x0c\x00\xc0\x18\x00\xc0\x18\x00\xc0\x10\x00\x00'\
b'\x30\x00\x00\x30\x00\x00\x30\x00\x00\x30\x3f\x80\x30\x3f\x80\x30'\
b'\x01\x80\x30\x01\x00\x30\x03\x00\x30\x03\x00\x18\x03\x00\x1c\x0f'\
b'\x00\x0f\xfe\x00\x03\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x14\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x04\x00\x20\x0c\x00\x60\x0c\x00\x60'\
b'\x0c\x00\x60\x0c\x00\x60\x08\x00\x60\x18\x00\x40\x18\x00\xc0\x18'\
b'\x00\xc0\x1f\xff\xc0\x1f\xff\xc0\x10\x00\xc0\x30\x00\x80\x30\x01'\
b'\x80\x30\x01\x80\x30\x01\x80\x30\x01\x80\x20\x01\x80\x60\x01\x00'\
b'\x60\x03\x00\x60\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00'\
b'\x06\x04\x04\x0c\x0c\x0c\x0c\x08\x08\x18\x18\x18\x18\x10\x30\x30'\
b'\x30\x30\x30\x20\x60\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x02\x00\x06\x00\x06\x00\x06\x00\x06\x00\x06'\
b'\x00\x04\x00\x0c\x00\x0c\x00\x0c\x00\x0c\x00\x0c\x00\x08\x00\x18'\
b'\x40\x18\x40\x18\x60\x30\x60\x30\x70\xe0\x3f\xc0\x0f\x80\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x13\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x04\x00\xe0\x0c\x01\x80\x0c\x03\x00'\
b'\x0c\x06\x00\x0c\x0c\x00\x08\x18\x00\x08\x30\x00\x18\xe0\x00\x19'\
b'\xc0\x00\x1b\x80\x00\x1f\x80\x00\x1c\xc0\x00\x18\x60\x00\x30\x60'\
b'\x00\x30\x30\x00\x30\x30\x00\x30\x18\x00\x20\x0c\x00\x60\x0c\x00'\
b'\x60\x06\x00\x60\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\x00\x00\x00\x00'\
b'\x00\x00\x00\x04\x00\x0c\x00\x0c\x00\x0c\x00\x0c\x00\x08\x00\x18'\
b'\x00\x18\x00\x18\x00\x18\x00\x18\x00\x10\x00\x30\x00\x30\x00\x30'\
b'\x00\x30\x00\x20\x00\x20\x00\x60\x00\x7f\xf8\x7f\xf8\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x18\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x06\x00\x07\x0e\x00\x07\x0e\x00\x0f\x0f'\
b'\x00\x0a\x0b\x00\x1a\x0b\x00\x36\x19\x00\x36\x19\x80\x66\x19\x80'\
b'\x66\x19\x80\xc4\x18\x80\x8c\x10\x81\x8c\x30\xc3\x0c\x30\xc3\x0c'\
b'\x30\x46\x08\x30\x46\x08\x30\x6c\x18\x20\x68\x18\x60\x78\x18\x60'\
b'\x30\x18\x60\x30\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x14\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x0c\x00\x20\x0e\x00\x60\x0e\x00\x60\x0f\x00'\
b'\x60\x0f\x00\x60\x09\x80\x60\x19\x80\x40\x18\xc0\xc0\x18\xc0\xc0'\
b'\x18\x60\xc0\x18\x60\xc0\x10\x30\xc0\x30\x30\x80\x30\x19\x80\x30'\
b'\x19\x80\x30\x09\x80\x20\x0d\x80\x20\x05\x80\x60\x07\x00\x60\x03'\
b'\x00\x60\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x13\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xfc\x00\x03\xfe\x00\x07\x87\x00\x0e\x01\x80'\
b'\x0c\x01\x80\x18\x00\xc0\x18\x00\xc0\x10\x00\xc0\x30\x00\xc0\x30'\
b'\x00\xc0\x30\x00\xc0\x30\x00\x80\x20\x01\x80\x20\x01\x80\x20\x01'\
b'\x80\x30\x03\x00\x30\x03\x00\x38\x06\x00\x1c\x1c\x00\x0f\xf8\x00'\
b'\x03\xe0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x11\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x0f\xfc\x00\x0f\xfe\x00\x0c\x07\x00\x0c\x03\x80\x08'\
b'\x01\x80\x18\x01\x80\x18\x01\x80\x18\x01\x80\x18\x03\x00\x18\x03'\
b'\x00\x10\x0e\x00\x3f\xfc\x00\x3f\xf0\x00\x30\x00\x00\x30\x00\x00'\
b'\x30\x00\x00\x20\x00\x00\x60\x00\x00\x60\x00\x00\x60\x00\x00\x60'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x13\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\xfc\x00\x03\xfe\x00\x07\x87\x00\x0e\x01\x80\x0c\x01'\
b'\x80\x18\x00\xc0\x18\x00\xc0\x30\x00\xc0\x30\x00\xc0\x30\x00\xc0'\
b'\x30\x00\xc0\x20\x00\x80\x20\x01\x80\x20\x01\x80\x20\x01\x80\x30'\
b'\x03\x00\x30\x07\x00\x38\x06\x00\x1c\x1c\x00\x0f\xf8\x00\x07\xf0'\
b'\x00\x00\x18\x00\x00\x0c\x00\x00\x06\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x12\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x0f\xfc\x00\x0f\xfe\x00\x0c\x07\x00\x0c\x03\x80\x0c\x01\x80'\
b'\x08\x01\x80\x18\x01\x80\x18\x01\x80\x18\x03\x00\x18\x03\x00\x18'\
b'\x0e\x00\x1f\xfc\x00\x3f\xf0\x00\x30\x30\x00\x30\x18\x00\x30\x18'\
b'\x00\x30\x08\x00\x20\x0c\x00\x60\x0c\x00\x60\x06\x00\x60\x06\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x11\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\xf8\x00\x07\xfe\x00\x0f\x07\x00\x0c\x03\x00\x18\x01\x80\x18'\
b'\x01\x80\x18\x00\x00\x1c\x00\x00\x0f\x00\x00\x07\xc0\x00\x01\xf0'\
b'\x00\x00\x3c\x00\x00\x0e\x00\x00\x06\x00\x00\x06\x00\x60\x06\x00'\
b'\x60\x06\x00\x60\x0c\x00\x38\x1c\x00\x1f\xf8\x00\x07\xe0\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x12\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1f'\
b'\xff\xc0\x1f\xff\xc0\x00\x60\x00\x00\x60\x00\x00\x60\x00\x00\x60'\
b'\x00\x00\x40\x00\x00\xc0\x00\x00\xc0\x00\x00\xc0\x00\x00\xc0\x00'\
b'\x00\x80\x00\x00\x80\x00\x01\x80\x00\x01\x80\x00\x01\x80\x00\x01'\
b'\x80\x00\x01\x00\x00\x03\x00\x00\x03\x00\x00\x03\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x13\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00'\
b'\x60\x0c\x00\x40\x0c\x00\x40\x0c\x00\xc0\x0c\x00\xc0\x08\x00\xc0'\
b'\x18\x00\xc0\x18\x00\x80\x18\x01\x80\x18\x01\x80\x10\x01\x80\x10'\
b'\x01\x80\x30\x01\x80\x30\x01\x00\x30\x03\x00\x30\x03\x00\x30\x07'\
b'\x00\x18\x06\x00\x1c\x1c\x00\x0f\xf8\x00\x03\xe0\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x13\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x30\x00\x60'\
b'\x10\x00\xc0\x18\x00\xc0\x18\x01\x80\x18\x01\x80\x18\x03\x00\x08'\
b'\x03\x00\x0c\x06\x00\x0c\x06\x00\x0c\x0c\x00\x0c\x0c\x00\x04\x18'\
b'\x00\x04\x18\x00\x06\x30\x00\x06\x30\x00\x06\x60\x00\x02\x60\x00'\
b'\x02\xc0\x00\x03\xc0\x00\x03\x80\x00\x03\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x1a\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x18'\
b'\x02\x00\xc0\x18\x07\x00\xc0\x18\x07\x00\x80\x18\x0f\x01\x80\x18'\
b'\x0b\x01\x80\x18\x19\x03\x00\x18\x19\x03\x00\x18\x11\x02\x00\x18'\
b'\x31\x06\x00\x08\x21\x86\x00\x08\x61\x84\x00\x08\x41\x8c\x00\x0c'\
b'\xc1\x88\x00\x0c\xc0\x98\x00\x0d\x80\x98\x00\x0d\x80\x90\x00\x0d'\
b'\x00\xb0\x00\x0f\x00\xa0\x00\x0e\x00\xe0\x00\x0e\x00\xe0\x00\x06'\
b'\x00\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x13'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x60\x06\x00\xc0'\
b'\x06\x01\x80\x03\x03\x00\x03\x03\x00\x01\x06\x00\x01\x8c\x00\x00'\
b'\x98\x00\x00\xf8\x00\x00\x70\x00\x00\x60\x00\x00\xe0\x00\x01\xb0'\
b'\x00\x03\x10\x00\x03\x18\x00\x06\x08\x00\x0c\x0c\x00\x18\x0c\x00'\
b'\x30\x06\x00\x30\x06\x00\xe0\x03\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x13\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x38\x00\xe0\x18\x01\xc0\x18'\
b'\x01\x80\x0c\x03\x00\x0c\x06\x00\x04\x06\x00\x06\x0c\x00\x06\x18'\
b'\x00\x02\x18\x00\x03\x30\x00\x03\x60\x00\x01\xc0\x00\x01\xc0\x00'\
b'\x01\x80\x00\x01\x80\x00\x01\x80\x00\x01\x80\x00\x01\x00\x00\x03'\
b'\x00\x00\x03\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x0f\xff\x80\x1f\xff\x80\x00\x03'\
b'\x00\x00\x03\x00\x00\x06\x00\x00\x0c\x00\x00\x18\x00\x00\x30\x00'\
b'\x00\x60\x00\x00\xc0\x00\x01\xc0\x00\x01\x80\x00\x03\x00\x00\x06'\
b'\x00\x00\x0c\x00\x00\x18\x00\x00\x30\x00\x00\x60\x00\x00\xe0\x00'\
b'\x00\xff\xfc\x00\xff\xfc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x0f\x80'\
b'\x0f\x80\x0c\x00\x0c\x00\x0c\x00\x08\x00\x08\x00\x18\x00\x18\x00'\
b'\x18\x00\x18\x00\x18\x00\x10\x00\x30\x00\x30\x00\x30\x00\x30\x00'\
b'\x30\x00\x20\x00\x20\x00\x60\x00\x60\x00\x60\x00\x60\x00\x40\x00'\
b'\x40\x00\xf8\x00\xf8\x00\x00\x00\x00\x00\x00\x00\x0b\x00\x00\x00'\
b'\x00\x00\x00\x00\x10\x00\x18\x00\x18\x00\x18\x00\x08\x00\x08\x00'\
b'\x0c\x00\x0c\x00\x0c\x00\x04\x00\x04\x00\x06\x00\x06\x00\x06\x00'\
b'\x02\x00\x02\x00\x03\x00\x03\x00\x03\x00\x01\x00\x01\x00\x01\x80'\
b'\x01\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x0f\x80'\
b'\x0f\x80\x01\x80\x01\x00\x01\x00\x03\x00\x03\x00\x03\x00\x03\x00'\
b'\x02\x00\x02\x00\x06\x00\x06\x00\x06\x00\x06\x00\x06\x00\x04\x00'\
b'\x0c\x00\x0c\x00\x0c\x00\x0c\x00\x0c\x00\x08\x00\x18\x00\x18\x00'\
b'\x18\x00\xf8\x00\xf8\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00'\
b'\x00\x00\x00\x00\x01\x80\x03\x80\x02\x80\x06\x80\x04\xc0\x0c\xc0'\
b'\x18\x40\x18\x40\x30\x60\x60\x60\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0e\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xf8'\
b'\xff\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00'\
b'\x0c\x0c\x06\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03'\
b'\xe0\x0f\xf0\x1c\x38\x18\x18\x30\x18\x00\x18\x00\x18\x07\xf8\x3f'\
b'\xf8\x78\x10\x60\x10\x40\x30\x40\x30\x60\xf0\x7f\xb0\x1e\x30\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00'\
b'\x00\x00\x00\x0c\x00\x0c\x00\x0c\x00\x0c\x00\x08\x00\x08\x00\x19'\
b'\xf0\x1b\xf8\x1e\x18\x18\x0c\x18\x0c\x10\x0c\x30\x04\x30\x0c\x30'\
b'\x0c\x30\x0c\x20\x0c\x20\x18\x70\x18\x78\x70\x7f\xe0\x47\xc0\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03'\
b'\xe0\x0f\xf0\x1c\x18\x18\x0c\x30\x0c\x30\x04\x20\x00\x60\x00\x60'\
b'\x00\x60\x00\x60\x00\x20\x18\x30\x18\x38\x70\x1f\xe0\x0f\x80\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00'\
b'\x00\x00\x00\x00\x03\x00\x03\x00\x02\x00\x02\x00\x06\x00\x06\x03'\
b'\xe6\x0f\xf6\x1c\x1c\x18\x0c\x30\x0c\x30\x0c\x20\x0c\x60\x0c\x60'\
b'\x08\x60\x18\x60\x18\x60\x18\x30\x38\x38\x78\x1f\xd0\x0f\x90\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03'\
b'\xe0\x07\xf0\x0c\x38\x18\x18\x30\x0c\x30\x0c\x20\x0c\x7f\xfc\x7f'\
b'\xfc\x60\x00\x60\x00\x60\x00\x30\x20\x38\x30\x1f\xe0\x07\xc0\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0b\x00\x00'\
b'\x00\x00\x00\x01\xe0\x03\xe0\x07\x00\x06\x00\x0c\x00\x0c\x00\x3f'\
b'\x80\x3f\x80\x4c\x00\x08\x00\x18\x00\x18\x00\x18\x00\x18\x00\x18'\
b'\x00\x10\x00\x30\x00\x30\x00\x30\x00\x30\x00\x20\x00\x60\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03'\
b'\xe6\x0f\xf6\x1c\x1c\x18\x0c\x30\x0c\x30\x0c\x30\x0c\x20\x0c\x60'\
b'\x08\x60\x18\x60\x18\x60\x18\x30\x38\x38\x78\x1f\xd0\x0f\xb0\x00'\
b'\x30\x00\x30\x40\x60\xe0\xe0\x7f\xc0\x1f\x00\x00\x00\x10\x00\x00'\
b'\x00\x00\x00\x04\x00\x0c\x00\x0c\x00\x0c\x00\x08\x00\x08\x00\x09'\
b'\xf0\x1b\xf8\x1e\x18\x1c\x0c\x18\x0c\x10\x0c\x30\x0c\x30\x08\x30'\
b'\x08\x30\x18\x30\x18\x20\x18\x60\x18\x60\x18\x60\x10\x60\x30\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\x00'\
b'\x00\x00\x0c\x0c\x00\x00\x00\x18\x18\x18\x18\x10\x10\x30\x30\x30'\
b'\x30\x20\x20\x60\x60\x60\x60\x00\x00\x00\x00\x00\x00\x00\x0b\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\xc0\x00\xc0\x00\x00\x00\x00\x00\x00'\
b'\x00\x80\x01\x80\x01\x80\x01\x80\x01\x80\x01\x00\x01\x00\x03\x00'\
b'\x03\x00\x03\x00\x03\x00\x02\x00\x02\x00\x06\x00\x06\x00\x06\x00'\
b'\x06\x00\x04\x00\x0c\x00\x1c\x00\xf8\x00\xf0\x00\x00\x00\x0e\x00'\
b'\x00\x00\x00\x00\x0c\x00\x0c\x00\x0c\x00\x0c\x00\x08\x00\x18\x00'\
b'\x18\x0c\x18\x38\x18\x70\x10\xe0\x11\x80\x33\x00\x36\x00\x3e\x00'\
b'\x3b\x00\x31\x00\x21\x80\x60\xc0\x60\xc0\x60\x60\x60\x60\x40\x30'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00'\
b'\x00\x00\x04\x0c\x0c\x0c\x0c\x08\x18\x18\x18\x18\x18\x10\x30\x30'\
b'\x30\x30\x20\x20\x60\x60\x60\x60\x00\x00\x00\x00\x00\x00\x00\x19'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x19\xf0\x78\x00\x1b\xf9\xfc\x00\x16\x1b\x0e\x00\x18\x0e\x06'\
b'\x00\x30\x0c\x06\x00\x30\x0c\x06\x00\x30\x08\x06\x00\x30\x08\x06'\
b'\x00\x30\x18\x06\x00\x20\x18\x04\x00\x60\x18\x04\x00\x60\x18\x0c'\
b'\x00\x60\x10\x0c\x00\x60\x10\x0c\x00\x40\x30\x0c\x00\x40\x30\x0c'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x19'\
b'\xf0\x1b\xf8\x1e\x18\x1c\x0c\x18\x0c\x10\x0c\x30\x0c\x30\x08\x30'\
b'\x08\x30\x18\x30\x18\x20\x18\x60\x18\x60\x18\x60\x10\x60\x30\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03'\
b'\xe0\x0f\xf8\x1c\x18\x18\x0c\x30\x0c\x20\x04\x60\x04\x60\x04\x60'\
b'\x04\x60\x0c\x60\x0c\x60\x18\x30\x18\x38\x70\x1f\xe0\x0f\x80\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x19'\
b'\xf0\x1b\xf8\x1e\x18\x18\x0c\x18\x0c\x30\x0c\x30\x0c\x30\x0c\x30'\
b'\x0c\x30\x0c\x20\x0c\x60\x18\x70\x38\x78\x70\x7f\xe0\x67\xc0\x40'\
b'\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\xc0\x00\x00\x00\x10\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03'\
b'\xe6\x0f\xf6\x1c\x1c\x18\x0c\x30\x0c\x30\x0c\x20\x0c\x60\x0c\x60'\
b'\x08\x60\x18\x60\x18\x60\x18\x30\x38\x38\x78\x1f\xd0\x0f\x90\x00'\
b'\x30\x00\x30\x00\x30\x00\x30\x00\x20\x00\x60\x00\x00\x0a\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x19'\
b'\xc0\x1b\xc0\x1e\x00\x1c\x00\x18\x00\x10\x00\x30\x00\x30\x00\x30'\
b'\x00\x30\x00\x20\x00\x20\x00\x60\x00\x60\x00\x60\x00\x60\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0e\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07'\
b'\xc0\x0f\xf0\x18\x30\x30\x18\x30\x18\x38\x00\x1e\x00\x0f\xc0\x01'\
b'\xe0\x00\x70\x00\x30\x40\x30\x60\x30\x70\xe0\x3f\xc0\x1f\x80\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x04\x00\x04\x00\x0c\x00\x0c\x00\x7f'\
b'\x80\x7f\x80\x08\x00\x08\x00\x18\x00\x18\x00\x18\x00\x18\x00\x10'\
b'\x00\x10\x00\x30\x00\x30\x00\x30\x00\x30\x00\x3e\x00\x1e\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08'\
b'\x06\x18\x06\x18\x06\x18\x04\x18\x0c\x10\x0c\x10\x0c\x30\x0c\x30'\
b'\x0c\x30\x08\x30\x18\x30\x18\x30\x38\x38\x78\x1f\xd8\x0f\x90\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0e\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x30'\
b'\x0c\x30\x0c\x30\x18\x30\x18\x10\x30\x10\x30\x18\x60\x18\x60\x18'\
b'\xc0\x08\xc0\x09\x80\x0d\x80\x0f\x00\x0f\x00\x06\x00\x06\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x15\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x30\x18\x18\x30\x38\x18\x30\x38\x10'\
b'\x30\x78\x30\x10\x48\x30\x10\xc8\x60\x10\xc8\x60\x10\x88\x40\x11'\
b'\x88\xc0\x19\x0c\x80\x1b\x0d\x80\x1a\x0d\x00\x1e\x07\x00\x1c\x07'\
b'\x00\x0c\x06\x00\x08\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x18\x0e'\
b'\x0c\x0c\x0c\x18\x04\x30\x06\x60\x02\x60\x03\xc0\x01\x80\x03\x80'\
b'\x06\x80\x06\xc0\x0c\x40\x18\x60\x30\x20\x70\x30\xe0\x18\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x18\x06'\
b'\x18\x06\x18\x0c\x08\x0c\x08\x18\x0c\x18\x0c\x30\x0c\x30\x04\x60'\
b'\x04\x60\x06\xc0\x06\xc0\x07\x80\x03\x00\x03\x00\x02\x00\x06\x00'\
b'\x04\x00\x0c\x00\x18\x00\xf8\x00\xe0\x00\x00\x00\x0e\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1f\xfc'\
b'\x3f\xf8\x00\x18\x00\x30\x00\x60\x00\xc0\x01\x80\x03\x00\x06\x00'\
b'\x0c\x00\x18\x00\x30\x00\x60\x00\x60\x00\xff\xf0\xff\xf0\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0b\x00\x00\x00'\
b'\x00\x60\x00\xc0\x01\x80\x03\x00\x02\x00\x06\x00\x06\x00\x06\x00'\
b'\x06\x00\x04\x00\x0c\x00\x0c\x00\x0c\x00\x18\x00\x70\x00\x70\x00'\
b'\x30\x00\x18\x00\x18\x00\x18\x00\x18\x00\x18\x00\x10\x00\x10\x00'\
b'\x10\x00\x18\x00\x18\x00\x0c\x00\x06\x00\x00\x00\x06\x00\x00\x00'\
b'\x00\x0c\x0c\x08\x08\x08\x18\x18\x18\x10\x10\x10\x10\x30\x30\x30'\
b'\x20\x20\x20\x60\x60\x60\x40\x40\x40\xc0\x00\x00\x00\x0b\x00\x00'\
b'\x00\x08\x00\x06\x00\x02\x00\x03\x00\x03\x00\x03\x00\x03\x00\x03'\
b'\x00\x03\x00\x03\x00\x03\x00\x03\x00\x03\x00\x03\x80\x01\xc0\x01'\
b'\xc0\x03\x00\x06\x00\x04\x00\x04\x00\x0c\x00\x0c\x00\x0c\x00\x0c'\
b'\x00\x08\x00\x18\x00\x30\x00\x60\x00\x80\x00\x00\x00\x13\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x0f\x00\x80\x1f\xc0\x80\x30\xe1\x80\x20'\
b'\x7f\x00\x60\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
_index =\
b'\x00\x00\x40\x00\x61\x00\x82\x00\xc2\x00\x21\x01\x61\x01\xc0\x01'\
b'\x1f\x02\x40\x02\x80\x02\xc0\x02\x00\x03\x40\x03\x61\x03\x82\x03'\
b'\xa3\x03\xe3\x03\x23\x04\x63\x04\xa3\x04\xe3\x04\x23\x05\x63\x05'\
b'\xa3\x05\xe3\x05\x23\x06\x63\x06\x84\x06\xa5\x06\xe5\x06\x25\x07'\
b'\x65\x07\xa5\x07\x23\x08\x82\x08\xe1\x08\x40\x09\x9f\x09\xfe\x09'\
b'\x5d\x0a\xbc\x0a\x1b\x0b\x3c\x0b\x7c\x0b\xdb\x0b\x1b\x0c\x7a\x0c'\
b'\xd9\x0c\x38\x0d\x97\x0d\xf6\x0d\x55\x0e\xb4\x0e\x13\x0f\x72\x0f'\
b'\xd1\x0f\x4f\x10\xae\x10\x0d\x11\x6c\x11\xac\x11\xec\x11\x2c\x12'\
b'\x6c\x12\xac\x12\xcd\x12\x0d\x13\x4d\x13\x8d\x13\xcd\x13\x0d\x14'\
b'\x4d\x14\x8d\x14\xcd\x14\xee\x14\x2e\x15\x6e\x15\x8f\x15\x0d\x16'\
b'\x4d\x16\x8d\x16\xcd\x16\x0d\x17\x4d\x17\x8d\x17\xcd\x17\x0d\x18'\
b'\x4d\x18\xac\x18\xec\x18\x2c\x19\x6c\x19\xac\x19\xcd\x19\x0d\x1a'\
b'\x6c\x1a'
_mvfont = memoryview(_font)
_mvi = memoryview(_index)
ifb = lambda l : l[0] | (l[1] << 8)
def get_ch(ch):
oc = ord(ch)
ioff = 2 * (oc - 32 + 1) if oc >= 32 and oc <= 126 else 0
doff = ifb(_mvi[ioff : ])
width = ifb(_mvfont[doff : ])
next_offs = doff + 2 + ((width - 1)//8 + 1) * 31
return _mvfont[doff + 2:next_offs], 31, width
| 63.6853
| 68
| 0.707315
| 7,514
| 30,760
| 2.893
| 0.024488
| 0.667403
| 0.825145
| 0.946729
| 0.758349
| 0.706735
| 0.65374
| 0.617812
| 0.583402
| 0.562379
| 0
| 0.426386
| 0.020481
| 30,760
| 482
| 69
| 63.817427
| 0.295088
| 0.003218
| 0
| 0.152688
| 1
| 0.935484
| 0.907982
| 0.90759
| 0
| 1
| 0
| 0
| 0
| 1
| 0.019355
| false
| 0
| 0
| 0.017204
| 0.03871
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
3c30821a09ac765ea4d55bdc1822d2aca5787924
| 23,845
|
py
|
Python
|
test/test_fixes_ref.py
|
psg-mit/latexpp
|
0b7b523c9ce147c2d34cc430b1abd39972e33fa9
|
[
"MIT"
] | 4
|
2020-08-28T18:41:48.000Z
|
2021-11-11T11:23:58.000Z
|
test/test_fixes_ref.py
|
psg-mit/latexpp
|
0b7b523c9ce147c2d34cc430b1abd39972e33fa9
|
[
"MIT"
] | 4
|
2020-07-31T07:34:38.000Z
|
2021-11-23T19:05:00.000Z
|
test/test_fixes_ref.py
|
psg-mit/latexpp
|
0b7b523c9ce147c2d34cc430b1abd39972e33fa9
|
[
"MIT"
] | 1
|
2020-07-22T02:44:48.000Z
|
2020-07-22T02:44:48.000Z
|
import os
import os.path
import unittest
import helpers
from latexpp.fixes import ref
hyperref_aux_preamble = r"""\relax
\providecommand\hyper@newdestlabel[2]{}
\providecommand\HyperFirstAtBeginDocument{\AtBeginDocument}
\HyperFirstAtBeginDocument{\ifx\hyper@anchor\@undefined
\global\let\oldcontentsline\contentsline
\gdef\contentsline#1#2#3#4{\oldcontentsline{#1}{#2}{#3}}
\global\let\oldnewlabel\newlabel
\gdef\newlabel#1#2{\newlabelxx{#1}#2}
\gdef\newlabelxx#1#2#3#4#5#6{\oldnewlabel{#1}{{#2}{#3}}}
\AtEndDocument{\ifx\hyper@anchor\@undefined
\let\contentsline\oldcontentsline
\let\newlabel\oldnewlabel
\fi}
\fi}
\global\let\hyper@last\relax
\gdef\HyperFirstAtBeginDocument#1{#1}
\providecommand\HyField@AuxAddToFields[1]{}
\providecommand\HyField@AuxAddToCoFields[2]{}
"""
@unittest.skipIf(os.environ.get("TRAVIS", "") == "true",
"Skipping these tests on Travis CI because you'd need to run latex.")
class TestExpandRefs(unittest.TestCase):
maxDiff = None
def setUp(self):
# for possible location of crossreftools.sty and/or cleveref.sty in this
# directory (test/*.sty)
self.old_texinputs = os.environ.get('TEXINPUTS', '')
os.environ['TEXINPUTS'] = os.path.realpath(os.path.dirname(__file__)) \
+ ":" + self.old_texinputs + ":"
def tearDown(self):
os.environ['TEXINPUTS'] = self.old_texinputs
def test_flags_ref_types(self):
fix = ref.ExpandRefs(only_ref_types='ref')
self.assertEqual(fix.ref_types, ['ref'])
fix = ref.ExpandRefs(only_ref_types='cleveref')
self.assertEqual(fix.ref_types, ['cleveref'])
fix = ref.ExpandRefs(only_ref_types=['ref', 'ams-eqref'])
self.assertEqual(fix.ref_types, ['ref', 'ams-eqref'])
fix = ref.ExpandRefs(only_ref_types=set(['ref', 'ams-eqref']))
self.assertEqual(set(fix.ref_types), set(['ref', 'ams-eqref']))
def test_simple_ref(self):
latex = r"""
\documentclass[11pt]{article}
\usepackage{amsthm}
\newtheorem{lemma}{Lemma}
\begin{document}
Equation~(\ref{eq:test}) on page~\pageref{eq:test} reads:
\begin{equation}
\label{eq:test}
a + b = c\ .
\end{equation}
\end{document}
"""
lpp = helpers.MockLPP()
fix = ref.ExpandRefs(only_ref_types='ref', debug_latex_output=True)
fix._get_doc_preamble = fix._get_doc_preamble_recomposed
fix._get_auxfile_contents = lambda: r"""
\relax
\newlabel{eq:test}{{1}{1}}
"""
lpp.install_fix( fix )
# NOTE: KEEP \protect's in output, because the substitution might happen
# somewhere fragile.
self.assertEqual(
lpp.execute(latex),
r"""
\documentclass[11pt]{article}
\usepackage{amsthm}
\newtheorem{lemma}{Lemma}
\begin{document}
Equation~(1) on page~1 reads:
\begin{equation}
\label{eq:test}
a + b = c\ .
\end{equation}
\end{document}
"""
)
def test_simple_ref_with_hyperref(self):
latex = r"""
\documentclass[11pt]{article}
\usepackage{amsthm}
\newtheorem{lemma}{Lemma}
\usepackage{hyperref}
\begin{document}
Equation~(\ref{eq:test}) [Eq.~(\ref*{eq:test}) without link]:
\begin{equation}
\label{eq:test}
a + b = c\ .
\end{equation}
\end{document}
"""
lpp = helpers.MockLPP()
fix = ref.ExpandRefs(only_ref_types='ref', debug_latex_output=True)
fix._get_doc_preamble = fix._get_doc_preamble_recomposed
fix._get_auxfile_contents = lambda: hyperref_aux_preamble + r"""
\newlabel{eq:test}{{1}{1}{}{equation.0.1}{}}
"""
lpp.install_fix( fix )
# NOTE: KEEP \protect's in output, because the substitution might happen
# somewhere fragile.
self.assertEqual(
lpp.execute(latex),
r"""
\documentclass[11pt]{article}
\usepackage{amsthm}
\newtheorem{lemma}{Lemma}
\usepackage{hyperref}
\begin{document}
Equation~(\protect \hyperref [eq:test]{1}) [Eq.~(1) without link]:
\begin{equation}
\label{eq:test}
a + b = c\ .
\end{equation}
\end{document}
"""
)
def test_simple_ref_with_hyperref_nolink(self):
latex = r"""
\documentclass[11pt]{article}
\usepackage{amsthm}
\newtheorem{lemma}{Lemma}
\usepackage{hyperref}
\begin{document}
Equation~(\ref{eq:test}) [Eq.~(\ref*{eq:test}) without link]:
\begin{equation}
\label{eq:test}
a + b = c\ .
\end{equation}
\end{document}
"""
lpp = helpers.MockLPP()
fix = ref.ExpandRefs(only_ref_types='ref', make_hyperlinks=False, debug_latex_output=True)
fix._get_doc_preamble = fix._get_doc_preamble_recomposed
fix._get_auxfile_contents = lambda: hyperref_aux_preamble + r"""
\newlabel{eq:test}{{1}{1}{}{equation.0.1}{}}
"""
lpp.install_fix( fix )
# NOTE: KEEP \protect's in output, because the substitution might happen
# somewhere fragile.
self.assertEqual(
lpp.execute(latex),
r"""
\documentclass[11pt]{article}
\usepackage{amsthm}
\newtheorem{lemma}{Lemma}
\usepackage{hyperref}
\begin{document}
Equation~(1) [Eq.~(1) without link]:
\begin{equation}
\label{eq:test}
a + b = c\ .
\end{equation}
\end{document}
"""
)
def test_simple_eqref(self):
latex = r"""
\documentclass[11pt]{article}
\usepackage{amsmath}
\usepackage{amsthm}
\newtheorem{lemma}{Lemma}
\begin{document}
Equation~\eqref{eq:test} reads:
\begin{equation}
\label{eq:test}
a + b = c\ .
\end{equation}
\end{document}
"""
lpp = helpers.MockLPP()
fix = ref.ExpandRefs(only_ref_types='ams-eqref', debug_latex_output=True)
fix._get_doc_preamble = fix._get_doc_preamble_recomposed
fix._get_auxfile_contents = lambda: r"""
\relax
\newlabel{eq:test}{{1}{1}}
"""
lpp.install_fix( fix )
# NOTE: KEEP \protect's in output, because the substitution might happen
# somewhere fragile.
self.assertEqual(
lpp.execute(latex),
r"""
\documentclass[11pt]{article}
\usepackage{amsmath}
\usepackage{amsthm}
\newtheorem{lemma}{Lemma}
\begin{document}
Equation~\protect \textup {\mathsurround \z@ \protect \normalfont (\ignorespaces 1\unskip \@@italiccorr )} reads:
\begin{equation}
\label{eq:test}
a + b = c\ .
\end{equation}
\end{document}
"""
)
def test_simple_eqref_with_hyperref(self):
latex = r"""
\documentclass[11pt]{article}
\usepackage{amsmath}
\usepackage{amsthm}
\newtheorem{lemma}{Lemma}
\usepackage{hyperref}
\begin{document}
Equation~\eqref{eq:test}:
\begin{equation}
\label{eq:test}
a + b = c\ .
\end{equation}
\end{document}
"""
lpp = helpers.MockLPP()
fix = ref.ExpandRefs(only_ref_types='ams-eqref', debug_latex_output=True)
fix._get_doc_preamble = fix._get_doc_preamble_recomposed
fix._get_auxfile_contents = lambda: hyperref_aux_preamble + r"""
\newlabel{eq:test}{{1}{1}{}{equation.0.1}{}}
"""
lpp.install_fix( fix )
# NOTE: KEEP \protect's in output, because the substitution might happen
# somewhere fragile.
self.assertEqual(
lpp.execute(latex),
r"""
\documentclass[11pt]{article}
\usepackage{amsmath}
\usepackage{amsthm}
\newtheorem{lemma}{Lemma}
\usepackage{hyperref}
\begin{document}
Equation~\protect \textup {\mathsurround \z@ \protect \normalfont (\ignorespaces \protect \hyperref [eq:test]{1}\unskip \@@italiccorr )}:
\begin{equation}
\label{eq:test}
a + b = c\ .
\end{equation}
\end{document}
"""
)
def test_simple_cref(self):
latex = r"""
\documentclass[11pt]{article}
\usepackage{amsthm}
\newtheorem{lemma}{Lemma}
\usepackage{hyperref}
\usepackage{cleveref}
\begin{document}
A reference to \cref{lemma:test}. \Cref{lemma:test2} on
\cpageref{lemma:test2} is nice, too. \Cpageref{lemma:test2}
is what that was.
Here is a lemma:
\begin{lemma}
\label{lemma:test}
Test lemma.
\end{lemma}
And here is another lemma:
\begin{lemma}
\label{lemma:test2}
Another test lemma.
\end{lemma}
\end{document}
"""
lpp = helpers.MockLPP()
fix = ref.ExpandRefs(debug_latex_output=True)
fix._get_doc_preamble = fix._get_doc_preamble_recomposed
fix._get_auxfile_contents = lambda: hyperref_aux_preamble + r"""
\newlabel{lemma:test}{{1}{1}{}{lemma.1}{}}
\newlabel{lemma:test@cref}{{[lemma][1][]1}{1}}
\newlabel{lemma:test2}{{2}{1}{}{lemma.2}{}}
\newlabel{lemma:test2@cref}{{[lemma][2][]2}{1}}
"""
lpp.install_fix( fix )
self.assertEqual(
lpp.execute(latex),
# NOTE: KEEP \protect in output, because the substitution might
# happen somewhere fragile.
r"""
\documentclass[11pt]{article}
\usepackage{amsthm}
\newtheorem{lemma}{Lemma}
\usepackage{hyperref}
\begin{document}
A reference to lemma\protect \nobreakspace \protect \hyperlink {lemma.1}{1}. Lemma\protect \nobreakspace \protect \hyperlink {lemma.2}{2} on
page\protect \nobreakspace \protect \hyperlink {lemma.2}{1} is nice, too. Page\protect \nobreakspace \protect \hyperlink {lemma.2}{1}
is what that was.
Here is a lemma:
\begin{lemma}
\label{lemma:test}
Test lemma.
\end{lemma}
And here is another lemma:
\begin{lemma}
\label{lemma:test2}
Another test lemma.
\end{lemma}
\end{document}
"""
)
def test_simple_cref_nolink(self):
latex = r"""
\documentclass[11pt]{article}
\usepackage{amsthm}
\newtheorem{lemma}{Lemma}
\usepackage{hyperref}
\usepackage{cleveref}
\begin{document}
A reference to \cref{lemma:test}. \Cref{lemma:test2} on
\cpageref{lemma:test2} is nice, too. \Cpageref{lemma:test2}
is what that was.
Here is a lemma:
\begin{lemma}
\label{lemma:test}
Test lemma.
\end{lemma}
And here is another lemma:
\begin{lemma}
\label{lemma:test2}
Another test lemma.
\end{lemma}
\end{document}
"""
lpp = helpers.MockLPP()
fix = ref.ExpandRefs(make_hyperlinks=False, debug_latex_output=True)
fix._get_doc_preamble = fix._get_doc_preamble_recomposed
fix._get_auxfile_contents = lambda: hyperref_aux_preamble + r"""
\newlabel{lemma:test}{{1}{1}{}{lemma.1}{}}
\newlabel{lemma:test@cref}{{[lemma][1][]1}{1}}
\newlabel{lemma:test2}{{2}{1}{}{lemma.2}{}}
\newlabel{lemma:test2@cref}{{[lemma][2][]2}{1}}
"""
lpp.install_fix( fix )
self.assertEqual(
lpp.execute(latex),
# NOTE: KEEP \protect in output, because the substitution might
# happen somewhere fragile.
r"""
\documentclass[11pt]{article}
\usepackage{amsthm}
\newtheorem{lemma}{Lemma}
\usepackage{hyperref}
\begin{document}
A reference to lemma\protect \nobreakspace 1. Lemma\protect \nobreakspace 2 on
page\protect \nobreakspace 1 is nice, too. Page\protect \nobreakspace 1
is what that was.
Here is a lemma:
\begin{lemma}
\label{lemma:test}
Test lemma.
\end{lemma}
And here is another lemma:
\begin{lemma}
\label{lemma:test2}
Another test lemma.
\end{lemma}
\end{document}
"""
)
def test_simple_cref_nohyperrefloaded(self):
latex = r"""
\documentclass[11pt]{article}
\usepackage{amsthm}
\newtheorem{lemma}{Lemma}
\usepackage{cleveref}
\begin{document}
A reference to \cref{lemma:test}. \Cref{lemma:test2} on
\cpageref{lemma:test2} is nice, too. \Cpageref{lemma:test2}
is what that was.
Here is a lemma:
\begin{lemma}
\label{lemma:test}
Test lemma.
\end{lemma}
And here is another lemma:
\begin{lemma}
\label{lemma:test2}
Another test lemma.
\end{lemma}
\end{document}
"""
lpp = helpers.MockLPP()
fix = ref.ExpandRefs(make_hyperlinks=True, debug_latex_output=True)
fix._get_doc_preamble = fix._get_doc_preamble_recomposed
fix._get_auxfile_contents = lambda: r"""
\newlabel{lemma:test}{{1}{1}}
\newlabel{lemma:test@cref}{{[lemma][1][]1}{1}}
\newlabel{lemma:test2}{{2}{1}}
\newlabel{lemma:test2@cref}{{[lemma][2][]2}{1}}
"""
lpp.install_fix( fix )
self.assertEqual(
lpp.execute(latex),
# NOTE: KEEP \protect in output, because the substitution might
# happen somewhere fragile.
r"""
\documentclass[11pt]{article}
\usepackage{amsthm}
\newtheorem{lemma}{Lemma}
\begin{document}
A reference to lemma\protect \nobreakspace 1. Lemma\protect \nobreakspace 2 on
page\protect \nobreakspace 1 is nice, too. Page\protect \nobreakspace 1
is what that was.
Here is a lemma:
\begin{lemma}
\label{lemma:test}
Test lemma.
\end{lemma}
And here is another lemma:
\begin{lemma}
\label{lemma:test2}
Another test lemma.
\end{lemma}
\end{document}
"""
)
def test_simple_crefrange(self):
latex = r"""
\documentclass[11pt]{article}
\usepackage{amsthm}
\newtheorem{lemma}{Lemma}
\usepackage{hyperref}
\usepackage{cleveref}
\begin{document}
Ranges should work, too, like this:
\crefrange{lemma:test}{lemma:test2} on
\cpagerefrange{lemma:test}{lemma:test2}, as well as their
capitalized versions: \Crefrange{lemma:test}{lemma:test2};
\Cpagerefrange{lemma:test}{lemma:test2}.
Here is a lemma:
\begin{lemma}
\label{lemma:test}
Test lemma.
\end{lemma}
Here is an equation
\begin{equation}
\label{eq:hello}
a + b = c \ .
\end{equation}
And here is another lemma:
\begin{lemma}
\label{lemma:test2}
Another test lemma.
\end{lemma}
\end{document}
"""
lpp = helpers.MockLPP()
fix = ref.ExpandRefs(debug_latex_output=True)
fix._get_doc_preamble = fix._get_doc_preamble_recomposed
fix._get_auxfile_contents = lambda: hyperref_aux_preamble + r"""
\newlabel{lemma:test}{{1}{1}{}{lemma.1}{}}
\newlabel{lemma:test@cref}{{[lemma][1][]1}{1}}
\newlabel{lemma:test2}{{2}{1}{}{lemma.2}{}}
\newlabel{lemma:test2@cref}{{[lemma][2][]2}{1}}
"""
lpp.install_fix( fix )
self.assertEqual(
lpp.execute(latex),
# NOTE: KEEP \protect in output, because the substitution might
# happen somewhere fragile.
r"""
\documentclass[11pt]{article}
\usepackage{amsthm}
\newtheorem{lemma}{Lemma}
\usepackage{hyperref}
\begin{document}
Ranges should work, too, like this:
lemmas\protect \nobreakspace \protect \hyperlink {lemma.1}{1} to\protect \nobreakspace \protect \hyperlink {lemma.2}{2} on
pages\protect \nobreakspace \protect \hyperlink {lemma.1}{1} to\protect \nobreakspace \protect \hyperlink {lemma.2}{1}, as well as their
capitalized versions: Lemmas\protect \nobreakspace \protect \hyperlink {lemma.1}{1} to\protect \nobreakspace \protect \hyperlink {lemma.2}{2};
Pages\protect \nobreakspace \protect \hyperlink {lemma.1}{1} to\protect \nobreakspace \protect \hyperlink {lemma.2}{1}.
Here is a lemma:
\begin{lemma}
\label{lemma:test}
Test lemma.
\end{lemma}
Here is an equation
\begin{equation}
\label{eq:hello}
a + b = c \ .
\end{equation}
And here is another lemma:
\begin{lemma}
\label{lemma:test2}
Another test lemma.
\end{lemma}
\end{document}
"""
)
def test_simple_crefrange_nolink(self):
latex = r"""
\documentclass[11pt]{article}
\usepackage{amsthm}
\newtheorem{lemma}{Lemma}
\usepackage{hyperref}
\usepackage{cleveref}
\begin{document}
Ranges should work, too, like this:
\crefrange{lemma:test}{lemma:test2} on
\cpagerefrange{lemma:test}{lemma:test2}, as well as their
capitalized versions: \Crefrange{lemma:test}{lemma:test2};
\Cpagerefrange{lemma:test}{lemma:test2}.
Here is a lemma:
\begin{lemma}
\label{lemma:test}
Test lemma.
\end{lemma}
Here is an equation
\begin{equation}
\label{eq:hello}
a + b = c \ .
\end{equation}
And here is another lemma:
\begin{lemma}
\label{lemma:test2}
Another test lemma.
\end{lemma}
\end{document}
"""
lpp = helpers.MockLPP()
fix = ref.ExpandRefs(make_hyperlinks=False, debug_latex_output=True)
fix._get_doc_preamble = fix._get_doc_preamble_recomposed
fix._get_auxfile_contents = lambda: hyperref_aux_preamble + r"""
\newlabel{lemma:test}{{1}{1}{}{lemma.1}{}}
\newlabel{lemma:test@cref}{{[lemma][1][]1}{1}}
\newlabel{lemma:test2}{{2}{1}{}{lemma.2}{}}
\newlabel{lemma:test2@cref}{{[lemma][2][]2}{1}}
"""
lpp.install_fix( fix )
self.assertEqual(
lpp.execute(latex),
# NOTE: KEEP \protect in output, because the substitution might
# happen somewhere fragile.
r"""
\documentclass[11pt]{article}
\usepackage{amsthm}
\newtheorem{lemma}{Lemma}
\usepackage{hyperref}
\begin{document}
Ranges should work, too, like this:
lemmas\protect \nobreakspace 1 to\protect \nobreakspace 2 on
pages\protect \nobreakspace 1 to\protect \nobreakspace 1, as well as their
capitalized versions: Lemmas\protect \nobreakspace 1 to\protect \nobreakspace 2;
Pages\protect \nobreakspace 1 to\protect \nobreakspace 1.
Here is a lemma:
\begin{lemma}
\label{lemma:test}
Test lemma.
\end{lemma}
Here is an equation
\begin{equation}
\label{eq:hello}
a + b = c \ .
\end{equation}
And here is another lemma:
\begin{lemma}
\label{lemma:test2}
Another test lemma.
\end{lemma}
\end{document}
"""
)
def test_simple_namecref(self):
latex = r"""
\documentclass[11pt]{article}
\usepackage{amsthm}
\newtheorem{lemma}{Lemma}
\usepackage{hyperref}
\usepackage{cleveref}
\begin{document}
The proof of the \namecref{lemma:test2} is easy.
\nameCrefs{lemma:test2} like these are simple to prove.
``\nameCref{eq:hello}'' should work too (we'll add more
\namecrefs{eq:hello} in the future), and so should
``\lcnamecref{eq:hello}'' and ``\lcnamecrefs{eq:hello}.''
Here is a lemma:
\begin{lemma}
\label{lemma:test}
Test lemma.
\end{lemma}
Here is an equation
\begin{equation}
\label{eq:hello}
a + b = c \ .
\end{equation}
And here is another lemma:
\begin{lemma}
\label{lemma:test2}
Another test lemma.
\end{lemma}
\end{document}
"""
lpp = helpers.MockLPP()
fix = ref.ExpandRefs(only_ref_types=('cleveref',), debug_latex_output=True)
fix._get_doc_preamble = fix._get_doc_preamble_recomposed
fix._get_auxfile_contents = lambda: hyperref_aux_preamble + r"""
\newlabel{lemma:test}{{1}{1}{}{lemma.1}{}}
\newlabel{lemma:test@cref}{{[lemma][1][]1}{1}}
\newlabel{eq:hello}{{1}{1}{}{equation.0.1}{}}
\newlabel{eq:hello@cref}{{[equation][1][]1}{1}}
\newlabel{lemma:test2}{{2}{1}{}{lemma.2}{}}
\newlabel{lemma:test2@cref}{{[lemma][2][]2}{1}}
"""
lpp.install_fix( fix )
self.assertEqual(
lpp.execute(latex),
# NOTE: KEEP \protect in output, because the substitution might
# happen somewhere fragile.
r"""
\documentclass[11pt]{article}
\usepackage{amsthm}
\newtheorem{lemma}{Lemma}
\usepackage{hyperref}
\begin{document}
The proof of the lemma is easy.
Lemmas like these are simple to prove.
``Equation'' should work too (we'll add more
eqs. in the future), and so should
``\protect \MakeLowercase Equation'' and ``\protect \MakeLowercase Equations.''
Here is a lemma:
\begin{lemma}
\label{lemma:test}
Test lemma.
\end{lemma}
Here is an equation
\begin{equation}
\label{eq:hello}
a + b = c \ .
\end{equation}
And here is another lemma:
\begin{lemma}
\label{lemma:test2}
Another test lemma.
\end{lemma}
\end{document}
"""
)
def test_crefs_with_prefixes(self):
latex = r"""
\documentclass[11pt]{article}
\usepackage{amsthm}
\newtheorem{lemma}{Lemma}
\usepackage{hyperref}
\usepackage{cleveref}
\begin{document}
Try \cref*{A-lemma:test}; \cref*{B-eq:hi}; \cref{B-eq:hi,lemma:test2}; \cref{lemma:test2}
Also \crefrange*{A-eq:hello}{B-eq:hi}; \cpagerefrange{A-eq:hello}{B-eq:hi}
Here is a lemma:
\begin{lemma}
\label{A-lemma:test}
Test lemma.
\end{lemma}
Here is an equation
\begin{equation}
\label{A-eq:hello}
a + b = c \ .
\end{equation}
and another:
\begin{equation}
\label{B-eq:hi}
x = y\ .
\end{equation}
And here is another lemma:
\begin{lemma}
\label{lemma:test2}
Another test lemma.
\end{lemma}
\end{document}
"""
lpp = helpers.MockLPP()
fix = ref.ExpandRefs(expand_only_prefixes=['A-','B-'], debug_latex_output=True)
fix._get_doc_preamble = fix._get_doc_preamble_recomposed
fix._get_auxfile_contents = lambda: hyperref_aux_preamble + r"""
\newlabel{A-lemma:test}{{1}{1}{}{lemma.1}{}}
\newlabel{A-lemma:test@cref}{{[lemma][1][]1}{1}}
\newlabel{A-eq:hello}{{1}{1}{}{equation.0.1}{}}
\newlabel{A-eq:hello@cref}{{[equation][1][]1}{1}}
\newlabel{B-eq:hi}{{2}{1}{}{equation.0.2}{}}
\newlabel{B-eq:hi@cref}{{[equation][2][]2}{1}}
\newlabel{lemma:test2}{{2}{1}{}{lemma.2}{}}
\newlabel{lemma:test2@cref}{{[lemma][2][]2}{1}}
"""
lpp.install_fix( fix )
self.assertEqual(
lpp.execute(latex),
# NOTE: KEEP \protect in output, because the substitution might
# happen somewhere fragile.
r"""
\documentclass[11pt]{article}
\usepackage{amsthm}
\newtheorem{lemma}{Lemma}
\usepackage{hyperref}
\usepackage{cleveref}
\begin{document}
Try lemma\protect \nobreakspace 1; eq.\protect \nobreakspace \protect \textup {(2)}; \cref{B-eq:hi,lemma:test2}; \cref{lemma:test2}
Also eqs.\protect \nobreakspace \protect \textup {(1)} to\protect \nobreakspace \protect \textup {(2)}; pages\protect \nobreakspace \protect \hyperlink {equation.0.1}{1} to\protect \nobreakspace \protect \hyperlink {equation.0.2}{1}
Here is a lemma:
\begin{lemma}
\label{A-lemma:test}
Test lemma.
\end{lemma}
Here is an equation
\begin{equation}
\label{A-eq:hello}
a + b = c \ .
\end{equation}
and another:
\begin{equation}
\label{B-eq:hi}
x = y\ .
\end{equation}
And here is another lemma:
\begin{lemma}
\label{lemma:test2}
Another test lemma.
\end{lemma}
\end{document}
"""
)
def test_simple_labelcref(self):
latex = r"""
\documentclass[11pt]{article}
\usepackage{amsthm}
\newtheorem{lemma}{Lemma}
\usepackage{hyperref}
\usepackage{cleveref}
\begin{document}
The proof of Thing~\labelcref{lemma:test,lemma:test2} is easy.
Thing~\labelcref{eq:hello} is also simple to prove.
Here is a lemma:
\begin{lemma}
\label{lemma:test}
Test lemma.
\end{lemma}
Here is an equation
\begin{equation}
\label{eq:hello}
a + b = c \ .
\end{equation}
And here is another lemma:
\begin{lemma}
\label{lemma:test2}
Another test lemma.
\end{lemma}
\end{document}
"""
lpp = helpers.MockLPP()
fix = ref.ExpandRefs(only_ref_types=('cleveref',), debug_latex_output=True)
fix._get_doc_preamble = fix._get_doc_preamble_recomposed
fix._get_auxfile_contents = lambda: hyperref_aux_preamble + r"""
\newlabel{lemma:test}{{1}{1}{}{lemma.1}{}}
\newlabel{lemma:test@cref}{{[lemma][1][]1}{1}}
\newlabel{eq:hello}{{1}{1}{}{equation.0.1}{}}
\newlabel{eq:hello@cref}{{[equation][1][]1}{1}}
\newlabel{lemma:test2}{{2}{1}{}{lemma.2}{}}
\newlabel{lemma:test2@cref}{{[lemma][2][]2}{1}}
"""
lpp.install_fix( fix )
self.assertEqual(
lpp.execute(latex),
# NOTE: KEEP \protect in output, because the substitution might
# happen somewhere fragile.
r"""
\documentclass[11pt]{article}
\usepackage{amsthm}
\newtheorem{lemma}{Lemma}
\usepackage{hyperref}
\begin{document}
The proof of Thing~\protect \hyperlink {lemma.1}{1} and\protect \nobreakspace \protect \hyperlink {lemma.2}{2} is easy.
Thing~\protect \textup {(\protect \hyperlink {equation.0.1}{1})} is also simple to prove.
Here is a lemma:
\begin{lemma}
\label{lemma:test}
Test lemma.
\end{lemma}
Here is an equation
\begin{equation}
\label{eq:hello}
a + b = c \ .
\end{equation}
And here is another lemma:
\begin{lemma}
\label{lemma:test2}
Another test lemma.
\end{lemma}
\end{document}
"""
)
if __name__ == '__main__':
import logging
logging.basicConfig(level=logging.DEBUG)
helpers.test_main()
| 24.48152
| 238
| 0.667813
| 3,143
| 23,845
| 4.977728
| 0.068406
| 0.035794
| 0.030681
| 0.040908
| 0.892937
| 0.880857
| 0.855545
| 0.83752
| 0.820262
| 0.813423
| 0
| 0.017051
| 0.173579
| 23,845
| 973
| 239
| 24.50668
| 0.77687
| 0.052296
| 0
| 0.782779
| 0
| 0.007828
| 0.554314
| 0.291093
| 0
| 0
| 0
| 0
| 0.033268
| 1
| 0.031311
| false
| 0
| 0.011742
| 0
| 0.046967
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3cb89a3f4819a09d0342fdb6ab6b754f1add08ac
| 235
|
py
|
Python
|
src/sage/manifolds/all.py
|
vbraun/sage
|
07d6c37d18811e2b377a9689790a7c5e24da16ba
|
[
"BSL-1.0"
] | 3
|
2016-06-19T14:48:31.000Z
|
2022-01-28T08:46:01.000Z
|
src/sage/manifolds/all.py
|
rwst/sage
|
a9d274b9338e6ee24bf35ea8d25875507e51e455
|
[
"BSL-1.0"
] | null | null | null |
src/sage/manifolds/all.py
|
rwst/sage
|
a9d274b9338e6ee24bf35ea8d25875507e51e455
|
[
"BSL-1.0"
] | 7
|
2021-11-08T10:01:59.000Z
|
2022-03-03T11:25:52.000Z
|
from sage.misc.lazy_import import lazy_import
lazy_import('sage.manifolds.manifold', 'Manifold')
lazy_import('sage.manifolds.differentiable.real_line', 'OpenInterval')
lazy_import('sage.manifolds.differentiable.real_line', 'RealLine')
| 47
| 70
| 0.825532
| 30
| 235
| 6.233333
| 0.4
| 0.26738
| 0.224599
| 0.368984
| 0.481283
| 0.481283
| 0.481283
| 0
| 0
| 0
| 0
| 0
| 0.042553
| 235
| 4
| 71
| 58.75
| 0.831111
| 0
| 0
| 0
| 0
| 0
| 0.548936
| 0.429787
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
3ce07b2f87bc6d5d7c203c92287977d5512103cc
| 13,195
|
py
|
Python
|
miwell-flask-app/tests/functional_tests/page_objects/main_page_objects/psychiatrist_register_page_object.py
|
joshuahigginson1/DevOps-Assessment-1
|
d617522ada565b8b587e2ff7525e1138d1559a75
|
[
"MIT"
] | 1
|
2020-08-09T20:52:42.000Z
|
2020-08-09T20:52:42.000Z
|
miwell-flask-app/tests/functional_tests/page_objects/main_page_objects/psychiatrist_register_page_object.py
|
joshuahigginson1/DevOps-Assessment-1
|
d617522ada565b8b587e2ff7525e1138d1559a75
|
[
"MIT"
] | null | null | null |
miwell-flask-app/tests/functional_tests/page_objects/main_page_objects/psychiatrist_register_page_object.py
|
joshuahigginson1/DevOps-Assessment-1
|
d617522ada565b8b587e2ff7525e1138d1559a75
|
[
"MIT"
] | 1
|
2020-08-08T11:47:27.000Z
|
2020-08-08T11:47:27.000Z
|
# Contains the objects found on our psychiatrist register page.
# Imports -------------------------------------------------------------------------------------------------
from tests.functional_tests.page_objects.common_page_objects import CommonPageObject, PsychiatristNavBar
# Page Objects --------------------------------------------------------------------------------------------
class PsychiatristRegisterPageObject(CommonPageObject, PsychiatristNavBar):
# Default Page Variables.
user_email = 'Email@gmail.com'
user_password = 'Default Password'
user_first_name = 'Default'
user_last_name = 'Psychiatrist'
bacp_number = '1616161616161616'
user_phone_number = '07777777777'
user_postcode = 'L1 7DQ'
psychiatrist_bio = 'I am a robot. Not a registered psychiatrist.'
def get_email_field(self): # A function to return the attributes of our email field.
get_field_element = self.client.find_element_by_xpath('//*[@id="email"]')
get_label_element = self.client.find_element_by_xpath('/html/body/div[2]/form/div[1]/label')
email_field_attributes = {
'field element': get_field_element,
'label name': get_label_element.get_attribute('innerHTML'),
'label element': get_label_element
}
return email_field_attributes
def type_in_email_form(self, input_to_type=user_email): # A function to type text into our email form box.
# Retrieve our form attributes.
get_field_attributes = PsychiatristRegisterPageObject.get_email_field(self)
get_field_element = get_field_attributes['field element']
get_field_label = get_field_attributes['label name']
# After retrieving the field element, simulate typing into a form box.
get_field_element.send_keys(input_to_type)
print(f"Running Simulation: Currently typing '{input_to_type}' in the {get_field_label} field.")
def clear_email_form(self):
# Retrieve our form attributes.
get_field_element = self.get_email_field()['field element']
get_field_element.clear() # Clears our form.
def get_new_password_field(self): # A function to return the attributes of our new password field.
get_field_element = self.client.find_element_by_xpath('//*[@id="password"]')
get_label_element = self.client.find_element_by_xpath('/html/body/div[2]/form/div[2]/label')
new_password_field_attributes = {
'field element': get_field_element,
'label name': get_label_element.get_attribute('innerHTML'),
'label element': get_label_element
}
return new_password_field_attributes
def type_in_new_password_field(self, input_to_type=user_password): # A function to type text into our password form box.
# Retrieve our form attributes.
get_field_attributes = PsychiatristRegisterPageObject.get_new_password_field(self)
get_field_element = get_field_attributes['field element']
get_field_label = get_field_attributes['label name']
# After retrieving the field element, simulate typing into a form box.
get_field_element.send_keys(input_to_type)
print(f"Running Simulation: Currently typing '{input_to_type}' in the {get_field_label} field.")
def clear_new_password_field(self):
# Retrieve our form attributes.
get_field_element = self.get_new_password_field()['field element']
get_field_element.clear() # Clears our form.
def get_confirm_password_field(self): # A function to return the attributes of our confirm password field.
get_field_element = self.client.find_element_by_xpath('//*[@id="confirm_password"]')
get_label_element = self.client.find_element_by_xpath('/html/body/div[2]/form/div[3]/label')
confirm_password_field_attributes = {
'field element': get_field_element,
'label name': get_label_element.get_attribute('innerHTML'),
'label element': get_label_element
}
return confirm_password_field_attributes
def type_in_confirm_password_form(self, input_to_type=user_password):
# Retrieve our form attributes.
get_field_attributes = PsychiatristRegisterPageObject.get_confirm_password_field(self)
get_field_element = get_field_attributes['field element']
get_field_label = get_field_attributes['label name']
# After retrieving the field element, simulate typing into a form box.
get_field_element.send_keys(input_to_type)
print(f"Running Simulation: Currently typing '{input_to_type}' in the {get_field_label} field.")
def clear_confirm_password_field(self):
# Retrieve our form attributes.
get_field_element = self.get_confirm_password_field()['field element']
get_field_element.clear() # Clears our form.
def get_first_name_field(self):
get_field_element = self.client.find_element_by_xpath('//*[@id="first_name"]')
get_label_element = self.client.find_element_by_xpath('/html/body/div[2]/form/div[4]/label')
first_name_field_attributes = {
'field element': get_field_element,
'label name': get_label_element.get_attribute('innerHTML'),
'label element': get_label_element
}
return first_name_field_attributes
def type_in_first_name_form(self, input_to_type=user_first_name):
# Retrieve our form attributes.
get_field_attributes = PsychiatristRegisterPageObject.get_first_name_field(self)
get_field_element = get_field_attributes['field element']
get_field_label = get_field_attributes['label name']
# After retrieving the field element, simulate typing into a form box.
get_field_element.send_keys(input_to_type)
print(f"Running Simulation: Currently typing '{input_to_type}' in the {get_field_label} field.")
def clear_first_name_field(self):
# Retrieve our form attributes.
get_field_element = self.get_new_password_field()['field element']
get_field_element.clear() # Clears our form.
def get_last_name_field(self):
get_field_element = self.client.find_element_by_xpath('//*[@id="last_name"]')
get_label_element = self.client.find_element_by_xpath('/html/body/div[2]/form/div[5]/label')
last_name_field_attributes = {
'field element': get_field_element,
'label name': get_label_element.get_attribute('innerHTML'),
'label element': get_label_element
}
return last_name_field_attributes
def type_in_last_name_form(self, input_to_type=user_last_name):
# Retrieve our form attributes.
get_field_attributes = PsychiatristRegisterPageObject.get_last_name_field(self)
get_field_element = get_field_attributes['field element']
get_field_label = get_field_attributes['label name']
# After retrieving the field element, simulate typing into a form box.
get_field_element.send_keys(input_to_type)
print(f"Running Simulation: Currently typing '{input_to_type}' in the {get_field_label} field.")
def get_bacp_field(self): # A function to return the attributes of our username register field.
get_field_element = self.client.find_element_by_xpath('//*[@id="bacp_number"]')
get_label_element = self.client.find_element_by_xpath('/html/body/div[2]/form/div[6]/label')
bacp_field_attributes = {
'field element': get_field_element,
'label name': get_label_element.get_attribute('innerHTML'),
'label element': get_label_element
}
return bacp_field_attributes
def type_in_bacp_form(self, input_to_type=bacp_number): # A function to type text into our bacp form box.
# Retrieve our form attributes.
get_field_attributes = PsychiatristRegisterPageObject.get_bacp_field(self)
get_field_element = get_field_attributes['field element']
get_field_label = get_field_attributes['label name']
# After retrieving the field element, simulate typing into a form box.
get_field_element.send_keys(input_to_type)
print(f"Running Simulation: Currently typing '{input_to_type}' in the {get_field_label} field.")
def get_phone_number_field(self):
get_field_element = self.client.find_element_by_xpath('//*[@id="phone_number"]')
get_label_element = self.client.find_element_by_xpath('/html/body/div[2]/form/div[7]/label')
phone_number_field_attributes = {
'field element': get_field_element,
'label name': get_label_element.get_attribute('innerHTML'),
'label element': get_label_element
}
return phone_number_field_attributes
def type_in_phone_number_form(self, input_to_type=user_phone_number):
# Retrieve our form attributes.
get_field_attributes = PsychiatristRegisterPageObject.get_phone_number_field(self)
get_field_element = get_field_attributes['field element']
get_field_label = get_field_attributes['label name']
# After retrieving the field element, simulate typing into a form box.
get_field_element.send_keys(input_to_type)
print(f"Running Simulation: Currently typing '{input_to_type}' in the {get_field_label} field.")
def get_postcode_field(self):
get_field_element = self.client.find_element_by_xpath('//*[@id="postcode"]')
get_label_element = self.client.find_element_by_xpath('/html/body/div[2]/form/div[8]/label')
postcode_field_attributes = {
'field element': get_field_element,
'label name': get_label_element.get_attribute('innerHTML'),
'label element': get_label_element
}
return postcode_field_attributes
def type_in_postcode_form(self, input_to_type=user_postcode):
# Retrieve our form attributes.
get_field_attributes = PsychiatristRegisterPageObject.get_postcode_field(self)
get_field_element = get_field_attributes['field element']
get_field_label = get_field_attributes['label name']
# After retrieving the field element, simulate typing into a form box.
get_field_element.send_keys(input_to_type)
print(f"Running Simulation: Currently typing '{input_to_type}' in the {get_field_label} field.")
def get_psychiatrist_bio_field(self):
get_field_element = self.client.find_element_by_xpath('//*[@id="psychiatrist_bio"]')
get_label_element = self.client.find_element_by_xpath('/html/body/div[2]/form/div[9]/label')
psychiatrist_bio_field_attributes = {
'field element': get_field_element,
'label name': get_label_element.get_attribute('innerHTML'),
'label element': get_label_element
}
return psychiatrist_bio_field_attributes
def type_in_psychiatrist_bio_form(self, input_to_type=psychiatrist_bio):
# Retrieve our form attributes.
get_field_attributes = PsychiatristRegisterPageObject.get_psychiatrist_bio_field(self)
get_field_element = get_field_attributes['field element']
get_field_label = get_field_attributes['label name']
# After retrieving the field element, simulate typing into a form box.
get_field_element.send_keys(input_to_type)
print(f"Running Simulation: Currently typing '{input_to_type}' in the {get_field_label} field.")
def get_submit_button(self):
get_button_element = self.client.find_element_by_xpath('//*[@id="submit"]')
submit_button_attributes = {
'button label': get_button_element.get_attribute('innerHTML'),
'button element': get_button_element
}
return submit_button_attributes
def click_submit_button(self):
get_submit_button_element = self.get_submit_button()['button element']
get_submit_button_element.click()
def get_already_registered_button(self):
get_button_element = self.client.find_element_by_xpath('/html/body/a[1]')
already_registered_button_attributes = {
'button label': get_button_element.get_attribute('innerHTML'),
'button element': get_button_element
}
return already_registered_button_attributes
def click_already_registered_button(self):
get_already_registered_button_element = self.get_already_registered_button()['button element']
get_already_registered_button_element.click()
def get_register_as_patient_button(self):
get_button_element = self.client.find_element_by_xpath('/html/body/a[2]')
register_as_patient_button_attributes = {
'button label': get_button_element.get_attribute('innerHTML'),
'button element': get_button_element
}
return register_as_patient_button_attributes
def click_register_as_patient_button(self):
get_register_as_patient_button_element = self.get_register_as_patient_button()['button element']
get_register_as_patient_button_element.click()
| 41.234375
| 125
| 0.703069
| 1,659
| 13,195
| 5.205546
| 0.062688
| 0.082446
| 0.076424
| 0.071793
| 0.864173
| 0.817508
| 0.772696
| 0.755558
| 0.747221
| 0.712019
| 0
| 0.004633
| 0.198484
| 13,195
| 319
| 126
| 41.363636
| 0.811933
| 0.134824
| 0
| 0.40107
| 0
| 0
| 0.199437
| 0.038252
| 0
| 0
| 0
| 0
| 0
| 1
| 0.149733
| false
| 0.096257
| 0.005348
| 0
| 0.26738
| 0.048128
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
5980b4f3a66a83c5baba23cd1485a7d4e9f5a238
| 52,137
|
py
|
Python
|
dlkit/abstract_osid/locale/sessions.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 2
|
2018-02-23T12:16:11.000Z
|
2020-10-08T17:54:24.000Z
|
dlkit/abstract_osid/locale/sessions.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 87
|
2017-04-21T18:57:15.000Z
|
2021-12-13T19:43:57.000Z
|
dlkit/abstract_osid/locale/sessions.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 1
|
2018-03-01T16:44:25.000Z
|
2018-03-01T16:44:25.000Z
|
"""Implementations of locale abstract base class sessions."""
# pylint: disable=invalid-name
# Method names comply with OSID specification.
# pylint: disable=no-init
# Abstract classes do not define __init__.
# pylint: disable=too-few-public-methods
# Some interfaces are specified as 'markers' and include no methods.
# pylint: disable=too-many-public-methods
# Number of methods are defined in specification
# pylint: disable=too-many-ancestors
# Inheritance defined in specification
# pylint: disable=too-many-arguments
# Argument signature defined in specification.
# pylint: disable=duplicate-code
# All apparent duplicates have been inspected. They aren't.
import abc
class TranslationSession:
"""This session defines methods to translate text between a source and target locale."""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def get_source_language_type(self):
"""Gets the source language used in this session.
:return: the source language
:rtype: ``osid.type.Type``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.Type
source_language_type = property(fget=get_source_language_type)
@abc.abstractmethod
def get_source_script_type(self):
"""Gets the source script used in this session.
:return: the source script
:rtype: ``osid.type.Type``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.Type
source_script_type = property(fget=get_source_script_type)
@abc.abstractmethod
def get_target_language_type(self):
"""Gets the target language used in this session.
:return: the target language
:rtype: ``osid.type.Type``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.Type
target_language_type = property(fget=get_target_language_type)
@abc.abstractmethod
def get_target_script_type(self):
"""Gets the target script used in this session.
:return: the target script
:rtype: ``osid.type.Type``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.Type
target_script_type = property(fget=get_target_script_type)
@abc.abstractmethod
def can_translate(self):
"""Tests if this user can perform language translations.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer lookup
operations.
:return: ``false`` if translation methods are not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def get_translation(self, string):
"""Translates the given string into the target language.
:param string: the ``string`` to translate
:type string: ``string``
:return: the translated ``string``
:rtype: ``string``
:raise: ``NotFound`` -- no translation found
:raise: ``NullArgument`` -- ``null`` argument provided
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # string
@abc.abstractmethod
def translate_string(self, string, default_string):
"""Translates the given string into the target language.
:param string: the ``string`` to translate
:type string: ``string``
:param default_string: the default ``string`` if no translation available.
:type default_string: ``string``
:return: the translated ``string`` or the given default value if no translation available.
:rtype: ``string``
:raise: ``NullArgument`` -- ``null`` argument provided
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # string
@abc.abstractmethod
def translate_strings(self, strings, default_strings):
"""Translates the given strings into the target language.
:param strings: the ``string`` to translate
:type strings: ``string[]``
:param default_strings: the default ``string`` if no translation available.
:type default_strings: ``string[]``
:return: the translated ``strings`` or the given default value if no translation available.
:rtype: ``string``
:raise: ``NullArgument`` -- ``null`` argument provided
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # string
class TranslationAdminSession:
"""This session defines methods to translate and format text between a source and target locale."""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def get_source_language_type(self):
"""Gets the source language used in this session.
:return: the source language
:rtype: ``osid.type.Type``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.Type
source_language_type = property(fget=get_source_language_type)
@abc.abstractmethod
def get_source_script_type(self):
"""Gets the source script used in this session.
:return: the source script
:rtype: ``osid.type.Type``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.Type
source_script_type = property(fget=get_source_script_type)
@abc.abstractmethod
def get_target_language_type(self):
"""Gets the target language used in this session.
:return: the target language
:rtype: ``osid.type.Type``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.Type
target_language_type = property(fget=get_target_language_type)
@abc.abstractmethod
def get_target_script_type(self):
"""Gets the target script used in this session.
:return: the target script
:rtype: ``osid.type.Type``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.Type
target_script_type = property(fget=get_target_script_type)
@abc.abstractmethod
def can_update_translation(self):
"""Tests if this user can update localization strings.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer
notification operations.
:return: ``false`` if changing translation is not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def add_translation(self, source_text, target_text):
"""Adds or updates a string translation.
:param source_text: the source ``string``
:type source_text: ``string``
:param target_text: the translated string
:type target_text: ``string``
:raise: ``NullArgument`` -- ``source_text`` or ``target_text`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
@abc.abstractmethod
def remove_translation(self, source_text):
"""Removes a translation.
:param source_text: the source ``string``
:type source_text: ``string``
:raise: ``NullArgument`` -- ``source_text`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
pass
class NumericFormattingSession:
"""This session defines methods to format and parse numbers."""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def get_numeric_format_type(self):
"""Gets the numeric format type used in this session.
The numeric format type indicates the format of a number used in
a culture, such as the use of a period for a decimal place.
:return: the target language
:rtype: ``osid.type.Type``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.Type
numeric_format_type = property(fget=get_numeric_format_type)
@abc.abstractmethod
def can_format_numbers(self):
"""Tests if this user can format and parse numbers.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer lookup
operations.
:return: ``false`` if translation methods are not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def cardinal_to_string(self, c):
"""Gets a string representation of a cardinal.
:param c: a cardinal value
:type c: ``cardinal``
:return: the display string
:rtype: ``string``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # string
@abc.abstractmethod
def cardinals_to_strings(self, c):
"""Gets a string representation of an array of cardinals.
:param c: a cardinal value array
:type c: ``cardinal[]``
:return: the display strings
:rtype: ``string``
:raise: ``NullArgument`` -- ``c`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # string
@abc.abstractmethod
def string_to_cardinal(self, str_):
"""Parses a cardinal string.
:param str: a cardinal string
:type str: ``string``
:return: the cardinal value
:rtype: ``cardinal``
:raise: ``InvalidArgument`` -- ``str`` not of ``get_numeric_format_type()``
:raise: ``NullArgument`` -- ``str`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # cardinal
@abc.abstractmethod
def decimal_to_string(self, d):
"""Gets a string representation of a decimal.
:param d: a decimal value
:type d: ``decimal``
:return: the display string
:rtype: ``string``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # string
@abc.abstractmethod
def decimals_to_strings(self, d):
"""Gets a string representation of an array of decimals.
:param d: a decimals value array
:type d: ``decimal[]``
:return: the display strings
:rtype: ``string``
:raise: ``NullArgument`` -- ``d`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # string
@abc.abstractmethod
def decimal_to_string(self, str_):
"""Parses a decimal string.
:param str: a decimal string
:type str: ``string``
:return: the decimal value
:rtype: ``decimal``
:raise: ``InvalidArgument`` -- ``str`` not of ``get_numeric_format_type()``
:raise: ``NullArgument`` -- ``str`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # decimal
@abc.abstractmethod
def integer_to_string(self, i):
"""Gets a string representation of a integer.
:param i: an integer value
:type i: ``integer``
:return: the display string
:rtype: ``string``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # string
@abc.abstractmethod
def integers_to_strings(self, i):
"""Gets a string representation of an array of integers.
:param i: an integer value array
:type i: ``integer[]``
:return: the display strings
:rtype: ``string``
:raise: ``NullArgument`` -- ``i`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # string
@abc.abstractmethod
def integer_to_string(self, str_):
"""Parses an integer string.
:param str: an integer string
:type str: ``string``
:return: the integer value
:rtype: ``integer``
:raise: ``InvalidArgument`` -- ``str`` not of ``get_numeric_format_type()``
:raise: ``NullArgument`` -- ``str`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # integer
class CalendarFormattingSession:
"""This session defines methods to format and parse date times of the calendar and time type defined."""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def get_calendar_type(self):
"""Gets the calendar type for the datetimes used in this session.
:return: the calendar type
:rtype: ``osid.type.Type``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.Type
calendar_type = property(fget=get_calendar_type)
@abc.abstractmethod
def get_time_type(self):
"""Gets the time type for the times used in this session.
:return: the time type
:rtype: ``osid.type.Type``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.Type
time_type = property(fget=get_time_type)
@abc.abstractmethod
def get_date_format_type(self):
"""Gets the date format type used in this session.
:return: the target language
:rtype: ``osid.type.Type``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.Type
date_format_type = property(fget=get_date_format_type)
@abc.abstractmethod
def get_time_format_type(self):
"""Gets the time format type used in this session.
:return: the target script
:rtype: ``osid.type.Type``
*compliance: mandatory -- This method must be implemented.*
"""
return # osid.type.Type
time_format_type = property(fget=get_time_format_type)
@abc.abstractmethod
def can_display_primitives(self):
"""Tests if this user can format and parse date times.
A return of true does not guarantee successful authorization. A
return of false indicates that it is known all methods in this
session will result in a ``PermissionDenied``. This is intended
as a hint to an application that may opt not to offer lookup
operations.
:return: ``false`` if translation methods are not authorized, ``true`` otherwise
:rtype: ``boolean``
*compliance: mandatory -- This method must be implemented.*
"""
return # boolean
@abc.abstractmethod
def datetime_to_string(self, datetime):
"""Gets a string representation of a datetime.
:param datetime: a datetime value
:type datetime: ``osid.calendaring.DateTime``
:return: the display string
:rtype: ``string``
:raise: ``InvalidArgument`` -- ``datetime.get_calendar_type() != get_calendar_type()`` or ``datetime.get_time_type() != get_time_type()``
:raise: ``NullArgument`` -- ``datetime`` is ``null``
:raise: ``OperationFailed`` -- unable to complete request
:raise: ``PermissionDenied`` -- authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
return # string
@abc.abstractmethod
def datetimes_to_strings(self, datetimes):
    """Gets a string representation of a list of datetimes.

    :param datetimes: a datetime value list
    :type datetimes: ``osid.calendaring.DateTimeList``
    :return: the display strings
    :rtype: ``string``
    :raise: ``InvalidArgument`` -- ``datetime.get_calendar_type() != get_calendar_type()`` or ``datetime.get_time_type() != get_time_type()``
    :raise: ``NullArgument`` -- ``datetimes`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``PermissionDenied`` -- authorization failure

    *compliance: mandatory -- This method must be implemented.*

    """
    return # string
@abc.abstractmethod
def string_to_date_time(self, s):
    """Parses a date time string.

    :param s: a datetime string
    :type s: ``string``
    :return: the date time value
    :rtype: ``osid.calendaring.DateTime``
    :raise: ``InvalidArgument`` -- ``s`` is not of ``get_date_format_type()`` or ``s`` is not of ``get_time_format_type()``
    :raise: ``NullArgument`` -- ``s`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``PermissionDenied`` -- authorization failure

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.calendaring.DateTime
@abc.abstractmethod
def time_to_string(self, time):
    """Gets a string representation of a time.

    :param time: a time value
    :type time: ``osid.calendaring.Time``
    :return: the display string
    :rtype: ``string``
    :raise: ``InvalidArgument`` -- ``time.get_time_type() != get_time_type()``
    :raise: ``NullArgument`` -- ``time`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``PermissionDenied`` -- authorization failure

    *compliance: mandatory -- This method must be implemented.*

    """
    return # string
@abc.abstractmethod
def times_to_strings(self, times):
    """Gets a string representation of a list of times.

    :param times: a time value list
    :type times: ``osid.calendaring.TimeList``
    :return: the display strings
    :rtype: ``string``
    :raise: ``InvalidArgument`` -- ``time.get_time_type()`` != ``get_time_type()``
    :raise: ``NullArgument`` -- ``times`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``PermissionDenied`` -- authorization failure

    *compliance: mandatory -- This method must be implemented.*

    """
    return # string
@abc.abstractmethod
def string_to_time(self, s):
    """Parses a time string.

    :param s: a time string
    :type s: ``string``
    :return: the time value
    :rtype: ``osid.calendaring.Time``
    :raise: ``InvalidArgument`` -- ``s`` is not of ``get_time_format_type()``
    :raise: ``NullArgument`` -- ``s`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``PermissionDenied`` -- authorization failure

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.calendaring.Time
@abc.abstractmethod
def duration_to_string(self, duration):
    """Gets a string representation of a duration.

    :param duration: a duration value
    :type duration: ``osid.calendaring.Duration``
    :return: the display string
    :rtype: ``string``
    :raise: ``InvalidArgument`` -- ``duration.get_calendar_type() != get_calendar_type()`` or ``duration.get_time_type() != get_time_type()``
    :raise: ``NullArgument`` -- ``duration`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``PermissionDenied`` -- authorization failure

    *compliance: mandatory -- This method must be implemented.*

    """
    return # string
@abc.abstractmethod
def durations_to_strings(self, durations):
    """Gets a string representation of a list of durations.

    :param durations: a duration value list
    :type durations: ``osid.calendaring.DurationList``
    :return: the display strings
    :rtype: ``string``
    :raise: ``InvalidArgument`` -- ``duration.get_calendar_type() != get_calendar_type()`` or ``duration.get_time_type() != get_time_type()``
    :raise: ``NullArgument`` -- ``durations`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``PermissionDenied`` -- authorization failure

    *compliance: mandatory -- This method must be implemented.*

    """
    return # string
@abc.abstractmethod
def string_to_duration(self, s):
    """Parses a duration string.

    :param s: a duration string
    :type s: ``string``
    :return: the duration value
    :rtype: ``osid.calendaring.Duration``
    :raise: ``InvalidArgument`` -- ``s`` is not of ``get_date_format_type()`` or ``s`` is not of ``get_time_format_type()``
    :raise: ``NullArgument`` -- ``s`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``PermissionDenied`` -- authorization failure

    *compliance: mandatory -- This method must be implemented.*

    """
    return # osid.calendaring.Duration
class CurrencyFormattingSession:
    """This session defines methods to format and parse currency amounts."""

    # Python 2-style metaclass declaration (ignored by Python 3 interpreters).
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_currency_type(self):
        """Gets the currency type for amounts used in this session.

        :return: the currency type
        :rtype: ``osid.type.Type``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.type.Type

    currency_type = property(fget=get_currency_type)

    @abc.abstractmethod
    def get_numeric_format_type(self):
        """Gets the numeric format type for the amounts used in this session.

        :return: the numeric format type
        :rtype: ``osid.type.Type``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.type.Type

    numeric_format_type = property(fget=get_numeric_format_type)

    @abc.abstractmethod
    def can_format_currencies(self):
        """Tests if this user can format and parse currencies.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer lookup
        operations.

        :return: ``false`` if formatting methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    @abc.abstractmethod
    def currency_to_string(self, amount):
        """Gets a string representation of a currency including the currency symbol indicated by the currency type.

        :param amount: a currency value
        :type amount: ``osid.financials.Currency``
        :return: the display string
        :rtype: ``string``
        :raise: ``InvalidArgument`` -- ``amount.get_currency_type() != get_currency_type()``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # string

    @abc.abstractmethod
    def currencies_to_strings(self, amounts):
        """Gets a string representation of a list of currency amounts including the currency symbols indicated by the currency type.

        :param amounts: an array of amounts
        :type amounts: ``osid.financials.Currency[]``
        :return: the display strings
        :rtype: ``string``
        :raise: ``InvalidArgument`` -- ``amount.get_currency_type() != get_currency_type()``
        :raise: ``NullArgument`` -- ``amounts`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # string

    @abc.abstractmethod
    def string_to_currency(self, s):
        """Parses a currency amount.

        :param s: a currency string
        :type s: ``string``
        :return: the currency amount
        :rtype: ``osid.financials.Currency``
        :raise: ``InvalidArgument`` -- ``s`` is not of ``get_currency_type()`` or ``s`` is not of ``get_numeric_format_type()``
        :raise: ``NullArgument`` -- ``s`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.financials.Currency
class CoordinateFormattingSession:
    """This session defines methods to format and parse coordinates."""

    # Python 2-style metaclass declaration (ignored by Python 3 interpreters).
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_coordinate_type(self):
        """Gets the coordinate type used in this session.

        :return: the coordinate type
        :rtype: ``osid.type.Type``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.type.Type

    coordinate_type = property(fget=get_coordinate_type)

    @abc.abstractmethod
    def get_coordinate_format_type(self):
        """Gets the coordinate format type used in this session.

        :return: the coordinate format type
        :rtype: ``osid.type.Type``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.type.Type

    coordinate_format_type = property(fget=get_coordinate_format_type)

    @abc.abstractmethod
    def can_format_coordinates(self):
        """Tests if this user can format and parse coordinates.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer lookup
        operations.

        :return: ``false`` if formatting methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    @abc.abstractmethod
    def coordinate_to_string(self, coordinate):
        """Gets a string representation of a coordinate.

        :param coordinate: a coordinate value
        :type coordinate: ``osid.mapping.Coordinate``
        :return: the display string
        :rtype: ``string``
        :raise: ``InvalidArgument`` -- ``coordinate.has_record_type(get_coordinate_record_type())`` is ``false``
        :raise: ``NullArgument`` -- ``coordinate`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # string

    # NOTE(review): method name is misspelled ("ccoordinates"); kept as-is
    # for interface compatibility with existing implementers and callers.
    @abc.abstractmethod
    def ccoordinates_to_strings(self, coordinates):
        """Gets a string representation of a list of coordinates.

        :param coordinates: a list of coordinates
        :type coordinates: ``osid.mapping.CoordinateList``
        :return: the display strings
        :rtype: ``string``
        :raise: ``InvalidArgument`` -- ``coordinate.has_record_type(get_coordinate_record_type())`` is ``false``
        :raise: ``NullArgument`` -- ``coordinates`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # string

    @abc.abstractmethod
    def string_to_coordinate(self, s):
        """Parses a coordinate.

        :param s: a coordinate string
        :type s: ``string``
        :return: the display string
        :rtype: ``osid.mapping.Coordinate``
        :raise: ``InvalidArgument`` -- ``s`` is not of ``get_coordinate_format_type()``
        :raise: ``NullArgument`` -- ``s`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.mapping.Coordinate
class UnitConversionSession:
    """This session defines methods to convert units across measurement systems."""

    # Python 2-style metaclass declaration (ignored by Python 3 interpreters).
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def can_convert_units(self):
        """Tests if this user can perform unit conversions.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer lookup
        operations.

        :return: ``false`` if conversion methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    @abc.abstractmethod
    def convert_unit(self, source_unit, source_unit_type, target_unit_type):
        """Convert a unit of measurement.

        :param source_unit: the measure to convert
        :type source_unit: ``decimal``
        :param source_unit_type: the type of measure specified
        :type source_unit_type: ``osid.type.Type``
        :param target_unit_type: the type of converted measure
        :type target_unit_type: ``osid.type.Type``
        :return: resulting measure
        :rtype: ``decimal``
        :raise: ``NullArgument`` -- ``null`` argument provided
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``LocaleManager.supportsUnitTypesForConversion(measureType, conversionType)`` is false

        *compliance: mandatory -- This method must be implemented.*

        """
        return # decimal

    @abc.abstractmethod
    def convert_units(self, source_units, source_unit_type, target_unit_type):
        """Convert units of measurement.

        :param source_units: the measures to convert
        :type source_units: ``decimal[]``
        :param source_unit_type: the type of measure specified
        :type source_unit_type: ``osid.type.Type``
        :param target_unit_type: the type of converted measure
        :type target_unit_type: ``osid.type.Type``
        :return: resulting measures
        :rtype: ``decimal``
        :raise: ``NullArgument`` -- ``null`` argument provided
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure
        :raise: ``Unsupported`` -- ``LocaleManager.supportsUnitTypesForConversion(measureType, conversionType)`` is false

        *compliance: mandatory -- This method must be implemented.*

        """
        return # decimal
class CurrencyConversionSession:
    """This session defines methods to convert currency."""

    # Python 2-style metaclass declaration (ignored by Python 3 interpreters).
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_source_currency_type(self):
        """Gets the source currency type used in this session.

        :return: the source currency
        :rtype: ``osid.type.Type``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.type.Type

    source_currency_type = property(fget=get_source_currency_type)

    @abc.abstractmethod
    def get_target_currency_type(self):
        """Gets the target currency type used in this session.

        :return: the target currency
        :rtype: ``osid.type.Type``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.type.Type

    target_currency_type = property(fget=get_target_currency_type)

    @abc.abstractmethod
    def can_convert_currency(self):
        """Tests if this user can perform currency conversions.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer lookup
        operations.

        :return: ``false`` if conversion methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    @abc.abstractmethod
    def convert_currency(self, source_currency_amount):
        """Converts currency.

        :param source_currency_amount: the currency amount to convert
        :type source_currency_amount: ``osid.financials.Currency``
        :return: resulting currency units
        :rtype: ``osid.financials.Currency``
        :raise: ``InvalidArgument`` -- ``source_currency_amount.get_currency_type() != get_source_currency_type()``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.financials.Currency

    @abc.abstractmethod
    def convert_currencies(self, source_currency_amounts):
        """Converts currencies.

        :param source_currency_amounts: the currency amounts to convert
        :type source_currency_amounts: ``osid.financials.Currency[]``
        :return: resulting currency units
        :rtype: ``osid.financials.Currency``
        :raise: ``InvalidArgument`` -- ``source_currency_amount.get_currency_type() != get_source_currency_type()``
        :raise: ``NullArgument`` -- ``source_currency_amounts`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.financials.Currency
class CalendarConversionSession:
    """This session defines methods to convert dates across calendar systems."""

    # Python 2-style metaclass declaration (ignored by Python 3 interpreters).
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_source_calendar_type(self):
        """Gets the source calendar type used in this session.

        :return: the source calendar type
        :rtype: ``osid.type.Type``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.type.Type

    source_calendar_type = property(fget=get_source_calendar_type)

    @abc.abstractmethod
    def get_source_time_type(self):
        """Gets the source time type used in this session.

        :return: the source time type
        :rtype: ``osid.type.Type``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.type.Type

    source_time_type = property(fget=get_source_time_type)

    @abc.abstractmethod
    def get_target_calendar_type(self):
        """Gets the target calendar type used in this session.

        :return: the target calendar
        :rtype: ``osid.type.Type``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.type.Type

    target_calendar_type = property(fget=get_target_calendar_type)

    @abc.abstractmethod
    def get_target_time_type(self):
        """Gets the target time type used in this session.

        :return: the target time type
        :rtype: ``osid.type.Type``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.type.Type

    target_time_type = property(fget=get_target_time_type)

    @abc.abstractmethod
    def can_convert_calendars(self):
        """Tests if this user can perform calendar conversions.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer lookup
        operations.

        :return: ``false`` if conversion methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    @abc.abstractmethod
    def convert_calendar(self, source_date):
        """Converts a date.

        :param source_date: the date to convert
        :type source_date: ``osid.calendaring.DateTime``
        :return: the resulting date
        :rtype: ``osid.calendaring.DateTime``
        :raise: ``InvalidArgument`` -- ``source_date.get_calendar_type() != get_source_calendar_type()`` or ``source_date.get_time_type() != get_source_time_type()``
        :raise: ``NullArgument`` -- ``source_date`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.calendaring.DateTime

    @abc.abstractmethod
    def convert_calendars(self, source_date_list):
        """Converts dates.

        :param source_date_list: the dates to convert
        :type source_date_list: ``osid.calendaring.DateTimeList``
        :return: the resulting dates
        :rtype: ``osid.calendaring.DateTimeList``
        :raise: ``InvalidArgument`` -- ``source_date.get_calendar_type() != get_source_calendar_type()`` or ``source_date.get_time_type() != get_source_time_type()``
        :raise: ``NullArgument`` -- ``source_date_list`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.calendaring.DateTimeList
class CoordinateConversionSession:
    """This session defines methods to convert coordinates across coordinate systems."""

    # Python 2-style metaclass declaration (ignored by Python 3 interpreters).
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_source_coordinate_type(self):
        """Gets the source coordinate type used in this session.

        :return: the source coordinate type
        :rtype: ``osid.type.Type``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.type.Type

    source_coordinate_type = property(fget=get_source_coordinate_type)

    @abc.abstractmethod
    def get_target_coordinate_type(self):
        """Gets the target coordinate type used in this session.

        :return: the target coordinate type
        :rtype: ``osid.type.Type``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.type.Type

    target_coordinate_type = property(fget=get_target_coordinate_type)

    @abc.abstractmethod
    def can_convert_coordinates(self):
        """Tests if this user can perform coordinate conversions.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer lookup
        operations.

        :return: ``false`` if conversion methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    @abc.abstractmethod
    def convert_coordinate(self, source_coordinate):
        """Converts a coordinate.

        :param source_coordinate: the coordinate to convert
        :type source_coordinate: ``osid.mapping.Coordinate``
        :return: the resulting coordinate
        :rtype: ``osid.mapping.Coordinate``
        :raise: ``InvalidArgument`` -- ``source_coordinate.has_type(get_source_coordinate_record_type())`` is ``false``
        :raise: ``NullArgument`` -- ``source_coordinate`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.mapping.Coordinate

    @abc.abstractmethod
    def convert_coordinates(self, source_coordinate_list):
        """Converts coordinates.

        :param source_coordinate_list: the coordinates to convert
        :type source_coordinate_list: ``osid.mapping.CoordinateList``
        :return: the resulting coordinates
        :rtype: ``osid.mapping.CoordinateList``
        :raise: ``InvalidArgument`` -- ``source_coordinate.has_type(get_source_coordinate_record_type())`` is ``false``
        :raise: ``NullArgument`` -- ``source_coordinate_list`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.mapping.CoordinateList
class SpatialUnitConversionSession:
    """This session defines methods to convert spatial units."""

    # Python 2-style metaclass declaration (ignored by Python 3 interpreters).
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_source_spatial_unit_record_type(self):
        """Gets the source spatial unit record type used in this session.

        :return: the source spatial unit record type
        :rtype: ``osid.type.Type``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.type.Type

    source_spatial_unit_record_type = property(fget=get_source_spatial_unit_record_type)

    @abc.abstractmethod
    def get_target_spatial_unit_record_type(self):
        """Gets the target spatial unit record type used in this session.

        :return: the target spatial unit record type
        :rtype: ``osid.type.Type``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.type.Type

    target_spatial_unit_record_type = property(fget=get_target_spatial_unit_record_type)

    @abc.abstractmethod
    def can_convert_spatial_units(self):
        """Tests if this user can perform spatial unit conversions.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer lookup
        operations.

        :return: ``false`` if conversion methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    @abc.abstractmethod
    def convert_spatial_unit(self, source_spatial_unit):
        """Converts a spatial unit.

        :param source_spatial_unit: the spatial unit to convert
        :type source_spatial_unit: ``osid.mapping.SpatialUnit``
        :return: the resulting spatial unit
        :rtype: ``osid.mapping.SpatialUnit``
        :raise: ``InvalidArgument`` -- ``source_spatial_unit.has_type(get_source_spatial_unit_record_type())`` is ``false``
        :raise: ``NullArgument`` -- ``source_spatial_unit`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.mapping.SpatialUnit

    @abc.abstractmethod
    def convert_spatial_units(self, source_spatial_unit_list):
        """Converts spatial units.

        :param source_spatial_unit_list: the spatial units to convert
        :type source_spatial_unit_list: ``osid.mapping.SpatialUnitList``
        :return: the resulting spatial units
        :rtype: ``osid.mapping.SpatialUnitList``
        :raise: ``InvalidArgument`` -- ``source_spatial_unit.has_type(get_source_spatial_unit_record_type())`` is ``false``
        :raise: ``NullArgument`` -- ``source_spatial_unit_list`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.mapping.SpatialUnitList
class FormatConversionSession:
    """This session defines methods to convert text formats."""

    # Python 2-style metaclass declaration (ignored by Python 3 interpreters).
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_source_format_type(self):
        """Gets the source format type used in this session.

        :return: the source text format type
        :rtype: ``osid.type.Type``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.type.Type

    source_format_type = property(fget=get_source_format_type)

    @abc.abstractmethod
    def get_target_format_type(self):
        """Gets the target format type used in this session.

        :return: the target text format type
        :rtype: ``osid.type.Type``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.type.Type

    target_format_type = property(fget=get_target_format_type)

    @abc.abstractmethod
    def can_convert_formats(self):
        """Tests if this user can perform text format conversions.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer lookup
        operations.

        :return: ``false`` if conversion methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    @abc.abstractmethod
    def convert_format(self, source_text):
        """Converts a format.

        :param source_text: the string to convert
        :type source_text: ``string``
        :return: the resulting string
        :rtype: ``string``
        :raise: ``InvalidArgument`` -- ``source_text`` not of source format
        :raise: ``NullArgument`` -- ``source_text`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # string

    @abc.abstractmethod
    def convert_formats(self, source_texts):
        """Converts formats.

        :param source_texts: the strings to convert
        :type source_texts: ``string[]``
        :return: the resulting strings
        :rtype: ``string``
        :raise: ``InvalidArgument`` -- a ``source_text`` not of source format
        :raise: ``NullArgument`` -- ``source_texts`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # string
class CalendarInfoSession:
    """This session defines methods to examine a calendar."""

    # Python 2-style metaclass declaration (ignored by Python 3 interpreters).
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def get_calendar_type(self):
        """Gets the calendar type for the calendar system informational methods in this session.

        :return: the calendar type
        :rtype: ``osid.type.Type``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.type.Type

    calendar_type = property(fget=get_calendar_type)

    @abc.abstractmethod
    def get_time_type(self):
        """Gets the time system type for the time system informational methods in this session.

        :return: the time type
        :rtype: ``osid.type.Type``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.type.Type

    time_type = property(fget=get_time_type)

    @abc.abstractmethod
    def can_examine_calendars(self):
        """Tests if this user can perform calendar inspections.

        A return of true does not guarantee successful authorization. A
        return of false indicates that it is known all methods in this
        session will result in a ``PermissionDenied``. This is intended
        as a hint to an application that may opt not to offer lookup
        operations.

        :return: ``false`` if inspection methods are not authorized, ``true`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return # boolean

    @abc.abstractmethod
    def get_calendar_info(self):
        """Gets information about the calendar.

        :return: calendar information
        :rtype: ``osid.locale.CalendarInfo``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.locale.CalendarInfo

    calendar_info = property(fget=get_calendar_info)

    @abc.abstractmethod
    def get_time_info(self):
        """Gets information about the time system.

        :return: time information
        :rtype: ``osid.locale.TimeInfo``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``PermissionDenied`` -- authorization failure

        *compliance: mandatory -- This method must be implemented.*

        """
        return # osid.locale.TimeInfo

    time_info = property(fget=get_time_info)
| 33.378361
| 163
| 0.641195
| 5,677
| 52,137
| 5.772239
| 0.046503
| 0.045134
| 0.053099
| 0.076994
| 0.841863
| 0.788489
| 0.732247
| 0.700082
| 0.651652
| 0.633739
| 0
| 0
| 0.253467
| 52,137
| 1,561
| 164
| 33.399744
| 0.841915
| 0.670445
| 0
| 0.669782
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.271028
| false
| 0.006231
| 0.003115
| 0
| 0.722741
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
59b892e37e1d12878b7b8161a04f2b439ac0e758
| 12,319
|
py
|
Python
|
src/game/AI/test_ai.py
|
insoPL/QtDraughts
|
62368e6523bf3dd50752ae5dffc65e946775f58d
|
[
"MIT"
] | null | null | null |
src/game/AI/test_ai.py
|
insoPL/QtDraughts
|
62368e6523bf3dd50752ae5dffc65e946775f58d
|
[
"MIT"
] | null | null | null |
src/game/AI/test_ai.py
|
insoPL/QtDraughts
|
62368e6523bf3dd50752ae5dffc65e946775f58d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from game.AI import ai_test
from game.AI import _ai_rek_another_attack_in_a_row
from tools import *
from settings import Settings
class TestAi:
def test_no_possible_movement_empty_board(self):
    """An empty board offers no legal move, so applying the AI's move changes nothing."""
    settings = Settings(default=True)
    board = ListOfPieces([], [])
    chosen = ai_test(board.white_pieces, board.black_pieces, settings)
    board.apply_move(chosen)
    assert board == ListOfPieces([], [])
def test_no_possible_movement_end_of_board(self):
    """A piece at the board edge (or fully blocked) has no move; the board must stay unchanged."""
    settings = Settings(default=True)
    positions = [
        ([], [(2, 0)]),
        ([(0, 0), (2, 0)], [(1, 1)]),
    ]
    for whites, blacks in positions:
        board = ListOfPieces(list(whites), list(blacks))
        chosen = ai_test(board.white_pieces, board.black_pieces, settings)
        board.apply_move(chosen)
        assert board == ListOfPieces(list(whites), list(blacks))
def test_simple_move(self):
    """With no capture available, the AI advances a black piece one diagonal step."""
    settings = Settings(default=True)
    scenarios = [
        (([], [(7, 7)]), ([], [(6, 6)])),
        (([(0, 6)], [(1, 7)]), ([(0, 6)], [(2, 6)])),
    ]
    for (start_w, start_b), (end_w, end_b) in scenarios:
        board = ListOfPieces(list(start_w), list(start_b))
        chosen = ai_test(board.white_pieces, board.black_pieces, settings)
        board.apply_move(chosen)
        assert board == ListOfPieces(list(end_w), list(end_b))
def test_simple_attack_move(self):
    """When a capture is available, the AI takes the adjacent white piece."""
    settings = Settings(default=True)
    scenarios = [
        (([(1, 1)], [(2, 2)]), ([], [(0, 0)])),
        (([(7, 3), (5, 3)], [(6, 4)]), ([(7, 3)], [(4, 2)])),
    ]
    for (start_w, start_b), (end_w, end_b) in scenarios:
        board = ListOfPieces(list(start_w), list(start_b))
        chosen = ai_test(board.white_pieces, board.black_pieces, settings)
        board.apply_move(chosen)
        assert board == ListOfPieces(list(end_w), list(end_b))
def test_not_making_losing_moves(self):
    """The AI avoids the move that would leave its piece open to capture."""
    settings = Settings(default=True)
    # Black at (3, 3) is expected to move to (4, 2) rather than toward white (1, 1).
    board = ListOfPieces([(1, 1)], [(3, 3)])
    expected = ListOfPieces([(1, 1)], [(4, 2)])
    board.apply_move(ai_test(board.white_pieces, board.black_pieces, settings))
    assert board == expected
    # Same expected choice with additional pieces present on the board.
    board = ListOfPieces([(2, 2), (1, 1)], [(3, 3), (7, 7)])
    expected = ListOfPieces([(2, 2), (1, 1)], [(4, 2), (7, 7)])
    board.apply_move(ai_test(board.white_pieces, board.black_pieces, settings))
    assert board == expected
def test_guessing_enemy_moves(self):
    """The AI accounts for white's possible replies when picking its move."""
    settings = Settings(default=True)
    board = ListOfPieces([(0, 0), (2, 2), (4, 2)], [(3, 3)])
    # Expected outcome: white (4, 2) is captured and black ends on (5, 1).
    expected = ListOfPieces([(0, 0), (2, 2)], [(5, 1)])
    board.apply_move(ai_test(board.white_pieces, board.black_pieces, settings))
    assert board == expected
def test_multiple_attack(self):
    """A chain of jumps removes several white pieces in one move."""
    settings = Settings(default=True)
    # Black at (4, 4) is expected to jump (3, 3) and (1, 1), ending on (0, 0);
    # only white (5, 3) survives.
    board = ListOfPieces([(1, 1), (3, 3), (5, 3)], [(4, 4)])
    expected = ListOfPieces([(5, 3)], [(0, 0)])
    board.apply_move(ai_test(board.white_pieces, board.black_pieces, settings))
    assert board == expected
def test_multiple_attack_recursive_function(self):
wynik = _ai_rek_another_attack_in_a_row([(1, 1), (3, 3)], [(4, 4)], Color.black, (4, 4))
assert wynik == ((0,0),[(3,3),(1,1)])
wynik = _ai_rek_another_attack_in_a_row([(1, 1), (3, 3), (5, 3)], [(4, 4)], Color.black, (4, 4))
assert wynik == ((0,0),[(3,3),(1,1)])
def test_setting_force_attack(self):
# Verifies the force_attack setting: with it on (the default) the AI takes an
# available capture; with force_attack=False it plays a plain move instead.
default_settings = Settings(default=True)
# ASCII board converted by str_to_cords; 'w'/'b' presumably mark white/black
# pieces — matches the white_pieces/black_pieces lists used elsewhere.
# NOTE(review): each literal opens with four quotes, so the string content
# begins with a stray '"' — presumably ignored by str_to_cords; confirm.
tested_board = """" +---------------+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| |w| |b| | | | |
|-+-+-+-+-+-+-+-+
| | |w| | | | | |
|-+-+-+-+-+-+-+-+
| | | |b| |b| | |
|-+-+-+-+-+-+-+-+
|w| |w| | | | | |
+---------------+"""
# Expected position: the middle white piece has been captured (forced attack).
asserted_board = """"+---------------+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| |w| | | | | | |
|-+-+-+-+-+-+-+-+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| |b| |b| |b| | |
|-+-+-+-+-+-+-+-+
|w| |w| | | | | |
+---------------+"""
tested_board = str_to_cords(tested_board)
asserted_board = str_to_cords(asserted_board)
ai_move = ai_test(tested_board.white_pieces, tested_board.black_pieces, default_settings)
tested_board.apply_move(ai_move)
assert tested_board == asserted_board
# Same starting position, but with forced capture switched off.
default_settings.force_attack=False
tested_board = """" +---------------+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| |w| |b| | | | |
|-+-+-+-+-+-+-+-+
| | |w| | | | | |
|-+-+-+-+-+-+-+-+
| | | |b| |b| | |
|-+-+-+-+-+-+-+-+
|w| |w| | | | | |
+---------------+"""
# Expected position: black makes a plain move (no white piece is removed).
asserted_board = """"+---------------+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| |w| |b| | | | |
|-+-+-+-+-+-+-+-+
| | |w| | | | | |
|-+-+-+-+-+-+-+-+
| | | | | |b| | |
|-+-+-+-+-+-+-+-+
|w| |w| |b| | | |
+---------------+"""
tested_board = str_to_cords(tested_board)
asserted_board = str_to_cords(asserted_board)
ai_move = ai_test(tested_board.white_pieces, tested_board.black_pieces, default_settings)
tested_board.apply_move(ai_move)
assert tested_board == asserted_board
def test_setting_multiple_attack(self):
# Verifies the multiple_attack setting: with it on (the default) the AI chains
# jumps in one move; with multiple_attack=False only a single capture is made.
default_settings = Settings(default=True)
# ASCII board converted by str_to_cords; 'w'/'b' presumably mark white/black
# pieces. NOTE(review): the literal opens with four quotes, so the string
# content begins with a stray '"' — presumably ignored by str_to_cords; confirm.
tested_board = """" +---------------+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| | | | |b| | | |
|-+-+-+-+-+-+-+-+
| | | |w| |w| | |
|-+-+-+-+-+-+-+-+
| | | | | | |w| |
|-+-+-+-+-+-+-+-+
| |w| | | | | | |
|-+-+-+-+-+-+-+-+
| | | | | | | | |
+---------------+"""
# Expected position: two white pieces are gone — a chained multi-jump ending
# with black in the bottom-left corner.
asserted_board = """"+---------------+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| | | | | |w| | |
|-+-+-+-+-+-+-+-+
| | | | | | |w| |
|-+-+-+-+-+-+-+-+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
|b| | | | | | | |
+---------------+"""
tested_board = str_to_cords(tested_board)
asserted_board = str_to_cords(asserted_board)
ai_move = ai_test(tested_board.white_pieces, tested_board.black_pieces, default_settings)
tested_board.apply_move(ai_move)
assert tested_board == asserted_board
# Same starting position, but chained captures switched off.
default_settings.multiple_attack=False
tested_board = """" +---------------+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| | | | |b| | | |
|-+-+-+-+-+-+-+-+
| | | |w| |w| | |
|-+-+-+-+-+-+-+-+
| | | | | | |w| |
|-+-+-+-+-+-+-+-+
| |w| | | | | | |
|-+-+-+-+-+-+-+-+
| | | | | | | | |
+---------------+"""
# Expected position: only one white piece is captured; black stops after the
# first jump instead of continuing the chain.
asserted_board = """"+---------------+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| | | | | | | | |
|-+-+-+-+-+-+-+-+
| | | | | |w| | |
|-+-+-+-+-+-+-+-+
| | |b| | | |w| |
|-+-+-+-+-+-+-+-+
| |w| | | | | | |
|-+-+-+-+-+-+-+-+
| | | | | | | | |
+---------------+"""
tested_board = str_to_cords(tested_board)
asserted_board = str_to_cords(asserted_board)
ai_move = ai_test(tested_board.white_pieces, tested_board.black_pieces, default_settings)
tested_board.apply_move(ai_move)
assert tested_board == asserted_board
| 44.31295
| 104
| 0.340125
| 854
| 12,319
| 4.540984
| 0.077283
| 0.235431
| 0.093089
| 0.117586
| 0.909489
| 0.866426
| 0.849407
| 0.84296
| 0.834709
| 0.823363
| 0
| 0.018502
| 0.451579
| 12,319
| 277
| 105
| 44.472924
| 0.555506
| 0.001705
| 0
| 0.809129
| 0
| 0
| 0.501627
| 0
| 0
| 0
| 0
| 0
| 0.149378
| 1
| 0.041494
| false
| 0
| 0.016598
| 0
| 0.062241
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.