hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
b3fcaad834e8a409bfc302c97f3b073c56d1eff3 | 8,235 | py | Python | backend/api/migrations/0006_creditvalue_make_model_modelyear_trim_type_vehicle.py | kuanfan99/zeva | 57b506a108fe57438506569d5503c90c52216b2f | [
"Apache-2.0"
] | 3 | 2020-03-25T03:06:20.000Z | 2021-01-20T23:36:03.000Z | backend/api/migrations/0006_creditvalue_make_model_modelyear_trim_type_vehicle.py | kuanfan99/zeva | 57b506a108fe57438506569d5503c90c52216b2f | [
"Apache-2.0"
] | 740 | 2019-12-16T15:53:39.000Z | 2022-03-26T08:25:10.000Z | backend/api/migrations/0006_creditvalue_make_model_modelyear_trim_type_vehicle.py | kuanfan99/zeva | 57b506a108fe57438506569d5503c90c52216b2f | [
"Apache-2.0"
] | 11 | 2019-11-28T20:39:15.000Z | 2022-01-31T17:53:31.000Z | # Generated by Django 3.0.2 on 2020-01-13 10:53
import db_comments.model_mixins
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: create the vehicle-catalogue tables.

    Adds CreditValue, Make, Model, ModelYear, Trim, Type and Vehicle.
    Every table carries create/update timestamps and create/update user
    FKs to ``api.UserProfile``.  Operation order matters: referenced
    tables (e.g. Make) are created before the models that point at them.
    """

    dependencies = [
        ('api', '0005_auto_20200113_0612'),
    ]

    operations = [
        # Credit values (columns `a` and `b`), later attached one-to-one
        # to a Vehicle; stored in table `credits`.
        migrations.CreateModel(
            name='CreditValue',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
                ('update_timestamp', models.DateTimeField(auto_now=True, null=True)),
                ('a', models.DecimalField(decimal_places=3, max_digits=5, null=True)),
                ('b', models.DecimalField(decimal_places=3, max_digits=5, null=True)),
                ('create_user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='api_creditvalue_CREATE_USER', to='api.UserProfile')),
                ('update_user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='api_creditvalue_UPDATE_USER', to='api.UserProfile')),
            ],
            options={
                'db_table': 'credits',
            },
            bases=(models.Model, db_comments.model_mixins.DBComments),
        ),
        # Vehicle manufacturer lookup table.
        migrations.CreateModel(
            name='Make',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
                ('update_timestamp', models.DateTimeField(auto_now=True, null=True)),
                ('name', models.CharField(max_length=250)),
                ('create_user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='api_make_CREATE_USER', to='api.UserProfile')),
                ('update_user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='api_make_UPDATE_USER', to='api.UserProfile')),
            ],
            options={
                'db_table': 'make',
            },
            bases=(models.Model, db_comments.model_mixins.DBComments),
        ),
        # Vehicle model, owned by a Make (DO_NOTHING on delete).
        migrations.CreateModel(
            name='Model',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
                ('update_timestamp', models.DateTimeField(auto_now=True, null=True)),
                ('name', models.CharField(max_length=250)),
                ('create_user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='api_model_CREATE_USER', to='api.UserProfile')),
                ('make', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, related_name='valid_models', to='api.Make')),
                ('update_user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='api_model_UPDATE_USER', to='api.UserProfile')),
            ],
            options={
                'db_table': 'model',
            },
            bases=(models.Model, db_comments.model_mixins.DBComments),
        ),
        # Model year with optional validity window; name is unique.
        migrations.CreateModel(
            name='ModelYear',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
                ('update_timestamp', models.DateTimeField(auto_now=True, null=True)),
                ('effective_date', models.DateField(blank=True, null=True)),
                ('expiration_date', models.DateField(blank=True, null=True)),
                ('name', models.CharField(max_length=250, unique=True)),
                ('create_user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='api_modelyear_CREATE_USER', to='api.UserProfile')),
                ('update_user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='api_modelyear_UPDATE_USER', to='api.UserProfile')),
            ],
            options={
                'db_table': 'model_year',
            },
            bases=(models.Model, db_comments.model_mixins.DBComments),
        ),
        # Trim level, owned by a Model (DO_NOTHING on delete).
        migrations.CreateModel(
            name='Trim',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
                ('update_timestamp', models.DateTimeField(auto_now=True, null=True)),
                ('name', models.CharField(max_length=250)),
                ('create_user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='api_trim_CREATE_USER', to='api.UserProfile')),
                ('model', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, related_name='valid_trims', to='api.Model')),
                ('update_user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='api_trim_UPDATE_USER', to='api.UserProfile')),
            ],
            options={
                'db_table': 'trim',
            },
            bases=(models.Model, db_comments.model_mixins.DBComments),
        ),
        # Vehicle type lookup table.
        migrations.CreateModel(
            name='Type',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
                ('update_timestamp', models.DateTimeField(auto_now=True, null=True)),
                ('name', models.CharField(max_length=250)),
                ('create_user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='api_type_CREATE_USER', to='api.UserProfile')),
                ('update_user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='api_type_UPDATE_USER', to='api.UserProfile')),
            ],
            options={
                'db_table': 'type',
            },
            bases=(models.Model, db_comments.model_mixins.DBComments),
        ),
        # The vehicle itself: make/model/trim/model_year are PROTECTed FKs
        # and together must be unique; credit_value is one-to-one.
        migrations.CreateModel(
            name='Vehicle',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_timestamp', models.DateTimeField(auto_now_add=True, null=True)),
                ('update_timestamp', models.DateTimeField(auto_now=True, null=True)),
                ('range', models.IntegerField()),
                ('create_user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='api_vehicle_CREATE_USER', to='api.UserProfile')),
                ('credit_value', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='api.CreditValue')),
                ('make', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='api.Make')),
                ('model', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='api.Model')),
                ('model_year', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='api.ModelYear')),
                ('trim', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='api.Trim')),
                ('update_user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='api_vehicle_UPDATE_USER', to='api.UserProfile')),
            ],
            options={
                'db_table': 'vehicle',
                'unique_together': {('make', 'model', 'trim', 'model_year')},
            },
            bases=(models.Model, db_comments.model_mixins.DBComments),
        ),
    ]
| 62.386364 | 185 | 0.622101 | 909 | 8,235 | 5.421342 | 0.105611 | 0.051948 | 0.073052 | 0.098214 | 0.901786 | 0.885958 | 0.885958 | 0.866274 | 0.82569 | 0.796063 | 0 | 0.007926 | 0.234001 | 8,235 | 131 | 186 | 62.862595 | 0.773304 | 0.005464 | 0 | 0.536 | 1 | 0 | 0.161944 | 0.026258 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.024 | 0 | 0.048 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
376aa7b4ef6a7313463ca5166556bd6e3a57e7d7 | 2,585 | py | Python | series/tests/test_series_views.py | sysvii/fetch-django | 948e70a46cf1b0138a5a3b53cf92349ee22640fc | [
"MIT"
] | null | null | null | series/tests/test_series_views.py | sysvii/fetch-django | 948e70a46cf1b0138a5a3b53cf92349ee22640fc | [
"MIT"
] | 3 | 2016-03-26T00:42:51.000Z | 2016-04-29T11:33:18.000Z | series/tests/test_series_views.py | sysvii/fetch-django | 948e70a46cf1b0138a5a3b53cf92349ee22640fc | [
"MIT"
] | null | null | null | import json
from django.test import TestCase, Client
from series.models import Series
class TestSeriesViews(TestCase):
    """Smoke tests for the series app's views.

    Each test asserts that the expected template was rendered and that the
    response is HTTP 200; JSON endpoints additionally check that the body
    parses as valid JSON.  Test data comes from the listed fixtures.
    """

    fixtures = ["test_series.json", "test_provider.json"]

    def test_index_json(self):
        """Series index as JSON: correct template and parseable payload."""
        c = Client(CONTENT_TYPE='application/json')
        res = c.get('/series/')
        self.assertTemplateUsed(res, 'series/index.json')
        # assertEquals was a deprecated alias (removed in Python 3.12).
        self.assertEqual(res.status_code, 200)
        # json.loads raises if the body is not valid JSON.
        json.loads(res.content.decode(res.charset))

    def test_index_html(self):
        """Series index as HTML."""
        c = Client()
        res = c.get('/series/')
        self.assertTemplateUsed(res, 'series/index.html')
        self.assertEqual(res.status_code, 200)

    def test_index_mediatype_json(self):
        """Media-type index as JSON."""
        c = Client(CONTENT_TYPE='application/json')
        res = c.get('/series/media/')
        self.assertTemplateUsed(res, 'series/media_index.json')
        self.assertEqual(res.status_code, 200)
        json.loads(res.content.decode(res.charset))

    def test_index_mediatype_html(self):
        """Media-type index as HTML."""
        c = Client()
        res = c.get('/series/media/')
        self.assertTemplateUsed(res, 'series/media_index.html')
        self.assertEqual(res.status_code, 200)

    def test_view_html(self):
        """Detail page for series with pk=1."""
        c = Client()
        res = c.get('/series/1/')
        self.assertTemplateUsed(res, 'series/view.html')
        self.assertEqual(res.status_code, 200)

    def test_new_html(self):
        """Creation form renders both the page and the shared form template."""
        c = Client()
        res = c.get('/series/new/')
        self.assertTemplateUsed(res, 'series/new.html')
        self.assertTemplateUsed(res, 'series/form_series.html')
        self.assertEqual(res.status_code, 200)

    def test_edit_html(self):
        """Edit form renders both the page and the shared form template."""
        c = Client()
        res = c.get('/series/1/edit/')
        self.assertTemplateUsed(res, 'series/edit.html')
        self.assertTemplateUsed(res, 'series/form_series.html')
        self.assertEqual(res.status_code, 200)

    def test_view_mediatype_html(self):
        """Media-type detail page as HTML."""
        c = Client()
        res = c.get('/series/media/1/')
        self.assertTemplateUsed(res, 'series/media_view.html')
        self.assertEqual(res.status_code, 200)

    def test_view_mediatype_json(self):
        """Media-type detail page as JSON: correct template and parseable payload."""
        c = Client(CONTENT_TYPE='application/json')
        res = c.get('/series/media/1/')
        self.assertTemplateUsed(res, 'series/media_view.json')
        self.assertEqual(res.status_code, 200)
        json.loads(res.content.decode(res.charset))

    def test_index_contains_entry(self):
        """The fixture entry 'Test Case' appears on the rendered index."""
        c = Client()
        res = c.get('/series/')
        html = res.content.decode(res.charset)
        # assertIn gives a clearer failure message than assertTrue(x in y).
        self.assertIn('Test Case', html)
| 25.097087 | 63 | 0.63675 | 319 | 2,585 | 5.018809 | 0.134796 | 0.069956 | 0.171768 | 0.212992 | 0.823235 | 0.787633 | 0.787633 | 0.772642 | 0.745784 | 0.662711 | 0 | 0.015562 | 0.2294 | 2,585 | 102 | 64 | 25.343137 | 0.788153 | 0 | 0 | 0.516667 | 0 | 0 | 0.165957 | 0.052611 | 0 | 0 | 0 | 0 | 0.35 | 1 | 0.166667 | false | 0 | 0.05 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
3777c51420fd540765d7a397283b0b545744540a | 12,951 | py | Python | src/compas/datastructures/mesh/operations/collapse.py | philianeles/compas | 129a5a7e9d8832495d2bbee6ce7c6463ab50f2d1 | [
"MIT"
] | null | null | null | src/compas/datastructures/mesh/operations/collapse.py | philianeles/compas | 129a5a7e9d8832495d2bbee6ce7c6463ab50f2d1 | [
"MIT"
] | null | null | null | src/compas/datastructures/mesh/operations/collapse.py | philianeles/compas | 129a5a7e9d8832495d2bbee6ce7c6463ab50f2d1 | [
"MIT"
] | null | null | null | from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
# Public API of this module: the two edge-collapse operations.
__all__ = [
    'mesh_collapse_edge',
    'trimesh_collapse_edge',
]
def is_collapse_legal(mesh, u, v, allow_boundary=False):
    """Verify if the requested collapse of edge (u, v) is legal for a triangle mesh.

    Parameters
    ----------
    mesh : compas.datastructures.Mesh
        The mesh.
    u : hashable
        The vertex to collapse towards.
    v : hashable
        The vertex to collapse.
    allow_boundary : bool, optional
        If ``False`` (default), refuse the collapse when either vertex
        lies on the boundary of the mesh.

    Returns
    -------
    bool
        ``True`` if the collapse is legal.
        ``False`` otherwise.
    """
    # collapsing of boundary vertices is currently not supported
    # change this to `and` to support collapsing to or from the boundary
    if not allow_boundary:
        if mesh.is_vertex_on_boundary(v) or mesh.is_vertex_on_boundary(u):
            return False
    # Check for "contained" faces: every vertex adjacent to both u and v
    # must form a face with them in one of the two orientations.
    # The original ran this byte-identical check in two loops (u-side and
    # v-side); both iterate exactly the common neighbors, so a single pass
    # over the intersection is equivalent.
    for nbr in set(mesh.halfedge[u]) & set(mesh.halfedge[v]):
        # check if U > V > NBR is a face
        fkey = mesh.halfedge[u][v]
        if fkey != mesh.halfedge[v][nbr] or fkey != mesh.halfedge[nbr][u]:
            # check if V > U > NBR is a face
            fkey = mesh.halfedge[v][u]
            if fkey != mesh.halfedge[u][nbr] or fkey != mesh.halfedge[nbr][v]:
                return False
    return True
def mesh_collapse_edge(self, u, v, t=0.5, allow_boundary=False, fixed=None):
    """Collapse the edge (u, v), merging vertex ``v`` into vertex ``u``.

    The surviving vertex ``u`` is moved to the point at parameter ``t``
    along the edge: ``t == 0.0`` keeps ``u`` in place, ``t == 1.0`` moves
    it onto ``v``, and intermediate values interpolate between the two.

    An edge can only be collapsed if the collapse is *legal* (see
    ``is_collapse_legal``): any vertex that is a neighbor of both ``u``
    and ``v`` must form a face with them, and boundary vertices are
    refused unless ``allow_boundary`` is ``True``.

    Parameters
    ----------
    u : hashable
        The first vertex of the (half-) edge (kept).
    v : hashable
        The second vertex of the (half-) edge (removed).
    t : float
        Where to collapse to, in ``[0.0, 1.0]``.
    allow_boundary : bool, optional
        Allow collapsing to or from the boundary.
    fixed : sequence, optional
        Vertices that may not take part in a collapse.

    Returns
    -------
    bool
        ``True`` when the edge was collapsed, ``False`` when the collapse
        is illegal or touches a fixed vertex.  (Returning ``True`` on
        success makes this consistent with ``trimesh_collapse_edge``.)

    Raises
    ------
    ValueError
        If ``t`` is outside ``[0.0, 1.0]``.
    """
    if t < 0.0:
        raise ValueError('Parameter t should be greater than or equal to 0.')
    if t > 1.0:
        raise ValueError('Parameter t should be smaller than or equal to 1.')
    # check collapse conditions
    if not is_collapse_legal(self, u, v, allow_boundary=allow_boundary):
        return False
    # compare to fixed
    fixed = fixed or []
    if v in fixed or u in fixed:
        return False
    # move U to the interpolated position
    x, y, z = self.edge_point(u, v, t)
    self.vertex[u]['x'] = x
    self.vertex[u]['y'] = y
    self.vertex[u]['z'] = z
    # UV face
    fkey = self.halfedge[u][v]
    if fkey is None:
        del self.halfedge[u][v]
    else:
        face = self.face_vertices(fkey)
        f = len(face)
        # switch between UV face sizes
        # note: in a trimesh this is not necessary!
        if f < 3:
            # was "Invalid self face" — artifact of a bad mesh->self rename
            raise Exception("Invalid mesh face: {}".format(fkey))
        if f == 3:
            # triangle degenerates: delete the whole face
            o = face[face.index(u) - 1]
            del self.halfedge[u][v]
            del self.halfedge[v][o]
            del self.halfedge[o][u]
            del self.face[fkey]
        else:
            # u > v > d => u > d
            d = self.face_vertex_descendant(fkey, v)
            face.remove(v)
            del self.halfedge[u][v]
            del self.halfedge[v][d]
            self.halfedge[u][d] = fkey
    # VU face
    fkey = self.halfedge[v][u]
    if fkey is None:
        del self.halfedge[v][u]
    else:
        face = self.face_vertices(fkey)
        f = len(face)
        # switch between VU face sizes
        # note: in a trimesh this is not necessary!
        if f < 3:
            raise Exception("Invalid mesh face: {}".format(fkey))
        if f == 3:
            # triangle degenerates: delete the whole face
            o = face[face.index(v) - 1]
            del self.halfedge[v][u]  # the collapsing halfedge
            del self.halfedge[u][o]
            del self.halfedge[o][v]
            del self.face[fkey]
        else:
            # a > v > u => a > u
            a = self.face_vertex_ancestor(fkey, v)
            face.remove(v)
            del self.halfedge[a][v]
            del self.halfedge[v][u]
            self.halfedge[a][u] = fkey
    # V neighbors and halfedges coming into V
    for nbr, fkey in list(self.halfedge[v].items()):
        if fkey is None:
            self.halfedge[u][nbr] = None
            del self.halfedge[v][nbr]
        else:
            # a > v > nbr => a > u > nbr
            face = self.face[fkey]
            a = self.face_vertex_ancestor(fkey, v)
            face[face.index(v)] = u
            if v in self.halfedge[a]:
                del self.halfedge[a][v]
            del self.halfedge[v][nbr]
            self.halfedge[a][u] = fkey
            self.halfedge[u][nbr] = fkey
        # only update what will not be updated in the previous part
        # nbr > v > d => nbr > u > d
        if v in self.halfedge[nbr]:
            fkey = self.halfedge[nbr][v]
            del self.halfedge[nbr][v]
            self.halfedge[nbr][u] = fkey
    # delete V
    del self.halfedge[v]
    del self.vertex[v]
    return True
    # TODO: split this up into more efficient cases
    # - both not on boundary
    # - u on boundary
    # - v on boundary
    # - u and v on boundary
def trimesh_collapse_edge(self, u, v, t=0.5, allow_boundary=False, fixed=None):
    """Collapse the edge (u, v) of a triangle mesh, merging ``v`` into ``u``.

    The surviving vertex ``u`` is moved to the point at parameter ``t``
    along the edge: ``t == 0.0`` keeps ``u`` in place, ``t == 1.0`` moves
    it onto ``v``, and intermediate values interpolate between the two.

    An edge can only be collapsed if the collapse is *legal* (see
    ``is_collapse_legal``): any vertex that is a neighbor of both ``u``
    and ``v`` must form a face with them, and boundary vertices are
    refused unless ``allow_boundary`` is ``True``.

    Parameters
    ----------
    u : hashable
        The first vertex of the (half-) edge (kept).
    v : hashable
        The second vertex of the (half-) edge (removed).
    t : float
        Where to collapse to, in ``[0.0, 1.0]``.
    allow_boundary : bool, optional
        Allow collapsing to or from the boundary.
    fixed : sequence, optional
        Vertices that may not take part in a collapse.

    Returns
    -------
    bool
        ``True`` when the edge was collapsed, ``False`` when the collapse
        is illegal or touches a fixed vertex.

    Raises
    ------
    ValueError
        If ``t`` is outside ``[0.0, 1.0]``.
    """
    if t < 0.0:
        raise ValueError('Parameter t should be greater than or equal to 0.')
    if t > 1.0:
        raise ValueError('Parameter t should be smaller than or equal to 1.')
    # check collapse conditions
    if not is_collapse_legal(self, u, v, allow_boundary=allow_boundary):
        return False
    # compare to fixed
    fixed = fixed or []
    if v in fixed or u in fixed:
        return False
    # move U to the interpolated position
    x, y, z = self.edge_point(u, v, t)
    self.vertex[u]['x'] = x
    self.vertex[u]['y'] = y
    self.vertex[u]['z'] = z
    # UV face
    fkey = self.halfedge[u][v]
    if fkey is None:
        del self.halfedge[u][v]
    else:
        face = self.face[fkey]
        o = face[face.index(u) - 1]
        del self.halfedge[u][v]
        del self.halfedge[v][o]
        del self.halfedge[o][u]
        del self.face[fkey]
        # if the opposite vertex became isolated, remove it as well
        if len(self.halfedge[o]) < 2:
            del self.halfedge[o]
            del self.vertex[o]
            del self.halfedge[u][o]
    # VU face
    fkey = self.halfedge[v][u]
    if fkey is None:
        del self.halfedge[v][u]
    else:
        face = self.face[fkey]
        o = face[face.index(v) - 1]
        del self.halfedge[v][u]
        del self.halfedge[u][o]
        del self.halfedge[o][v]
        del self.face[fkey]
        # if the opposite vertex became isolated, remove it as well
        if len(self.halfedge[o]) < 2:
            del self.halfedge[o]
            del self.vertex[o]
            del self.halfedge[v][o]
    # neighborhood of V
    for nbr, fkey in list(self.halfedge[v].items()):
        if fkey is None:
            self.halfedge[u][nbr] = None
            del self.halfedge[v][nbr]
        else:
            # a > v > nbr => a > u > nbr
            face = self.face[fkey]
            a = face[face.index(v) - 1]
            self.face[fkey] = [a, u, nbr]
            if v in self.halfedge[a]:
                del self.halfedge[a][v]
            del self.halfedge[v][nbr]
            self.halfedge[a][u] = fkey
            self.halfedge[u][nbr] = fkey
            self.halfedge[nbr][a] = fkey
        # nbr > v > d => nbr > u > d
        if v in self.halfedge[nbr]:
            self.halfedge[nbr][u] = self.halfedge[nbr][v]
            del self.halfedge[nbr][v]
    # delete V
    del self.halfedge[v]
    del self.vertex[v]
    # clean up: redirect any remaining halfedges that still point at V.
    # Iterate a snapshot (list) of the keys: the original iterated the
    # dict directly while deleting from it, which raises RuntimeError in
    # Python 3 whenever the branch actually fires.
    for nu in self.halfedge[u]:
        for nbr in list(self.halfedge[nu]):
            if nbr == v:
                self.halfedge[nu][u] = self.halfedge[nu][v]
                del self.halfedge[nu][v]
    return True
# ==============================================================================
# Main
# ==============================================================================

if __name__ == "__main__":
    # Visual smoke test: triangulate the sample quad mesh shipped with
    # compas, perform a couple of edge swaps and one collapse, then plot.
    import compas

    from compas.datastructures import Mesh
    from compas.plotters import MeshPlotter
    from compas.topology import mesh_quads_to_triangles

    mesh = Mesh.from_obj(compas.get('faces.obj'))
    mesh_quads_to_triangles(mesh)
    mesh.swap_edge_tri(14, 19)
    mesh.swap_edge_tri(21, 16)
    mesh.collapse_edge_tri(21, 15)
    # Label vertices and faces with their keys for visual inspection.
    plotter = MeshPlotter(mesh, figsize=(10, 7))
    plotter.draw_vertices(text={key: key for key in mesh.vertices()}, radius=0.2)
    plotter.draw_faces(text={fkey: fkey for fkey in mesh.faces()})
    plotter.show()
| 29.772414 | 117 | 0.527064 | 1,769 | 12,951 | 3.801583 | 0.104579 | 0.149888 | 0.078067 | 0.035688 | 0.859331 | 0.806245 | 0.803717 | 0.794796 | 0.777249 | 0.755836 | 0 | 0.007591 | 0.349008 | 12,951 | 434 | 118 | 29.841014 | 0.79006 | 0.390935 | 0 | 0.738889 | 0 | 0 | 0.04282 | 0.002997 | 0 | 0 | 0 | 0 | 0 | 1 | 0.016667 | false | 0 | 0.038889 | 0 | 0.105556 | 0.005556 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
37a90ab07c4cd8c6f7c1ec149ed7ea2b9aedb373 | 5,062 | py | Python | z2/part2/interactive/jm/random_normal_1/538415696.py | kozakusek/ipp-2020-testy | 09aa008fa53d159672cc7cbf969a6b237e15a7b8 | [
"MIT"
] | 1 | 2020-04-16T12:13:47.000Z | 2020-04-16T12:13:47.000Z | z2/part2/interactive/jm/random_normal_1/538415696.py | kozakusek/ipp-2020-testy | 09aa008fa53d159672cc7cbf969a6b237e15a7b8 | [
"MIT"
] | 18 | 2020-03-06T17:50:15.000Z | 2020-05-19T14:58:30.000Z | z2/part2/interactive/jm/random_normal_1/538415696.py | kozakusek/ipp-2020-testy | 09aa008fa53d159672cc7cbf969a6b237e15a7b8 | [
"MIT"
] | 18 | 2020-03-06T17:45:13.000Z | 2020-06-09T19:18:31.000Z | from part1 import (
gamma_board,
gamma_busy_fields,
gamma_delete,
gamma_free_fields,
gamma_golden_move,
gamma_golden_possible,
gamma_move,
gamma_new,
)
"""
scenario: test_random_actions
uuid: 538415696
"""
"""
random actions, total chaos
"""
# Auto-generated random regression test for the gamma game engine
# (scenario "test_random_actions", uuid 538415696).  Each assert pins the
# engine's observed return value: gamma_move/gamma_golden_move return 1
# for an accepted move and 0 for a rejected one.
board = gamma_new(5, 6, 5, 4)
assert board is not None
assert gamma_move(board, 1, 4, 5) == 1
assert gamma_move(board, 1, 1, 1) == 1
assert gamma_move(board, 2, 2, 1) == 1
assert gamma_move(board, 2, 1, 3) == 1
assert gamma_move(board, 3, 4, 0) == 1
assert gamma_golden_possible(board, 3) == 1
assert gamma_move(board, 4, 5, 1) == 0
assert gamma_move(board, 5, 0, 3) == 1
assert gamma_move(board, 1, 5, 0) == 0
assert gamma_move(board, 1, 2, 5) == 1
assert gamma_move(board, 2, 4, 4) == 1
assert gamma_move(board, 3, 4, 3) == 1
assert gamma_move(board, 3, 4, 1) == 1
assert gamma_busy_fields(board, 3) == 3
assert gamma_move(board, 4, 1, 3) == 0
assert gamma_golden_possible(board, 4) == 1
assert gamma_golden_move(board, 4, 1, 1) == 1
assert gamma_move(board, 5, 1, 3) == 0
assert gamma_move(board, 5, 1, 2) == 1
assert gamma_golden_possible(board, 5) == 1
assert gamma_move(board, 1, 2, 2) == 1
assert gamma_move(board, 1, 1, 0) == 1
assert gamma_move(board, 2, 1, 0) == 0
assert gamma_move(board, 3, 2, 0) == 1
assert gamma_move(board, 4, 4, 4) == 0
assert gamma_golden_move(board, 5, 5, 2) == 0
assert gamma_move(board, 1, 4, 1) == 0
assert gamma_move(board, 1, 3, 1) == 0
assert gamma_move(board, 2, 2, 3) == 1
assert gamma_move(board, 3, 4, 1) == 0
assert gamma_golden_possible(board, 3) == 1
assert gamma_move(board, 4, 4, 0) == 0
assert gamma_move(board, 4, 4, 5) == 0
assert gamma_free_fields(board, 4) == 15
assert gamma_move(board, 5, 4, 3) == 0
assert gamma_move(board, 5, 3, 5) == 1
assert gamma_move(board, 1, 0, 0) == 1
assert gamma_move(board, 2, 1, 2) == 0
assert gamma_move(board, 2, 1, 5) == 1
assert gamma_move(board, 3, 4, 0) == 0
assert gamma_busy_fields(board, 3) == 4
assert gamma_move(board, 4, 2, 3) == 0
assert gamma_golden_possible(board, 4) == 0
assert gamma_move(board, 5, 3, 3) == 1
assert gamma_move(board, 1, 2, 4) == 1
assert gamma_move(board, 2, 5, 0) == 0
assert gamma_move(board, 2, 0, 0) == 0
assert gamma_move(board, 3, 3, 0) == 1
assert gamma_move(board, 4, 4, 0) == 0
assert gamma_move(board, 5, 3, 5) == 0
assert gamma_move(board, 1, 0, 5) == 0
# first board snapshot: render and compare against the expected layout
board498560140 = gamma_board(board)
assert board498560140 is not None
assert board498560140 == (".2151\n"
                          "..1.2\n"
                          "52253\n"
                          ".51..\n"
                          ".42.3\n"
                          "11333\n")
del board498560140
board498560140 = None
assert gamma_move(board, 2, 1, 5) == 0
assert gamma_move(board, 3, 0, 0) == 0
assert gamma_move(board, 3, 0, 0) == 0
assert gamma_golden_possible(board, 3) == 1
assert gamma_move(board, 4, 5, 0) == 0
assert gamma_free_fields(board, 4) == 9
assert gamma_golden_move(board, 4, 1, 1) == 0
assert gamma_move(board, 5, 0, 3) == 0
assert gamma_move(board, 1, 3, 0) == 0
assert gamma_move(board, 1, 0, 3) == 0
assert gamma_free_fields(board, 1) == 4
assert gamma_move(board, 2, 0, 2) == 0
assert gamma_move(board, 2, 2, 4) == 0
assert gamma_move(board, 3, 4, 0) == 0
assert gamma_move(board, 4, 4, 1) == 0
assert gamma_move(board, 4, 4, 0) == 0
assert gamma_free_fields(board, 4) == 9
assert gamma_move(board, 5, 1, 1) == 0
assert gamma_move(board, 1, 4, 3) == 0
assert gamma_move(board, 1, 2, 1) == 0
assert gamma_move(board, 2, 1, 5) == 0
assert gamma_move(board, 2, 4, 0) == 0
assert gamma_move(board, 3, 4, 3) == 0
# second snapshot: board unchanged since the previous one
board545367159 = gamma_board(board)
assert board545367159 is not None
assert board545367159 == (".2151\n"
                          "..1.2\n"
                          "52253\n"
                          ".51..\n"
                          ".42.3\n"
                          "11333\n")
del board545367159
board545367159 = None
assert gamma_move(board, 4, 2, 0) == 0
assert gamma_free_fields(board, 4) == 9
assert gamma_move(board, 5, 1, 0) == 0
assert gamma_move(board, 1, 2, 0) == 0
assert gamma_move(board, 2, 4, 2) == 0
assert gamma_move(board, 2, 0, 2) == 0
assert gamma_free_fields(board, 2) == 4
assert gamma_move(board, 3, 1, 2) == 0
assert gamma_move(board, 3, 1, 3) == 0
assert gamma_free_fields(board, 3) == 9
assert gamma_move(board, 4, 2, 0) == 0
assert gamma_move(board, 5, 1, 3) == 0
assert gamma_move(board, 1, 4, 3) == 0
assert gamma_move(board, 1, 4, 4) == 0
assert gamma_move(board, 2, 3, 5) == 0
assert gamma_free_fields(board, 2) == 4
assert gamma_move(board, 3, 4, 3) == 0
assert gamma_move(board, 4, 2, 4) == 0
assert gamma_move(board, 4, 0, 5) == 1
assert gamma_move(board, 5, 0, 3) == 0
assert gamma_busy_fields(board, 5) == 4
assert gamma_golden_possible(board, 5) == 1
assert gamma_move(board, 1, 1, 4) == 1
assert gamma_move(board, 1, 0, 5) == 0
assert gamma_move(board, 4, 4, 3) == 0
assert gamma_move(board, 4, 0, 3) == 0
# third snapshot: reflects the two accepted moves at (0,5) and (1,4)
board501662886 = gamma_board(board)
assert board501662886 is not None
assert board501662886 == ("42151\n"
                          ".11.2\n"
                          "52253\n"
                          ".51..\n"
                          ".42.3\n"
                          "11333\n")
del board501662886
board501662886 = None
assert gamma_move(board, 5, 2, 3) == 0
# release the engine's board before exiting
gamma_delete(board)
| 31.055215 | 46 | 0.659226 | 928 | 5,062 | 3.443966 | 0.049569 | 0.347622 | 0.375469 | 0.500626 | 0.820713 | 0.78567 | 0.728723 | 0.556008 | 0.405507 | 0.377347 | 0 | 0.140685 | 0.181351 | 5,062 | 162 | 47 | 31.246914 | 0.63055 | 0 | 0 | 0.326389 | 0 | 0 | 0.025332 | 0 | 0 | 0 | 0 | 0 | 0.75 | 1 | 0 | false | 0 | 0.006944 | 0 | 0.006944 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
80cb4b695e6809825baeacb0a862bdcb15716388 | 6,558 | py | Python | crop_align/align.py | AutoLV/NoisyFER | 353ff60bad90dd346cd6a8fc54d7a6acd5897044 | [
"MIT"
] | 15 | 2020-11-09T16:35:08.000Z | 2022-02-12T14:53:11.000Z | crop_align/align.py | AutoLV/NoisyFER | 353ff60bad90dd346cd6a8fc54d7a6acd5897044 | [
"MIT"
] | 1 | 2021-07-21T03:33:46.000Z | 2021-08-08T20:24:12.000Z | crop_align/align.py | AutoLV/NoisyFER | 353ff60bad90dd346cd6a8fc54d7a6acd5897044 | [
"MIT"
] | 3 | 2021-03-30T10:21:52.000Z | 2021-09-12T15:55:32.000Z | import cv2
import numpy as np
LEFT_EYE_INDICES = [36, 37, 38, 39, 40, 41]
RIGHT_EYE_INDICES = [42, 43, 44, 45, 46, 47]
class MyFaceAligner:
    """Align a face image from 68-point facial landmarks.

    The eyes are rotated onto a horizontal line, then the image is scaled
    and translated so the left eye lands at ``desiredLeftEye`` (given as
    fractions of the output size) inside a ``desiredFaceWidth`` x
    ``desiredFaceHeight`` output crop.
    """

    def __init__(self, desiredLeftEye=(0.35, 0.35),
                 desiredFaceWidth=256, desiredFaceHeight=None):
        # Target output geometry; a square crop is produced when no
        # explicit height is given.
        self.desiredLeftEye = desiredLeftEye
        self.desiredFaceWidth = desiredFaceWidth
        self.desiredFaceHeight = (
            desiredFaceWidth if desiredFaceHeight is None else desiredFaceHeight
        )

    def align(self, image, shape):
        """Return the aligned face crop of ``image``.

        ``shape`` is indexed as ``shape[:, 36:48]``, i.e. assumed to be a
        (2, 68) array with an x-row and a y-row of landmark coordinates
        -- TODO confirm against the caller.
        """
        # Eye centres: landmarks 36-41 are one eye, 42-47 the other.
        leftEyeCenter = shape[:, 36:42].mean(axis=1).astype("int")
        rightEyeCenter = shape[:, 42:48].mean(axis=1).astype("int")

        # Angle of the inter-eye line; rotating by it levels the eyes.
        dY = rightEyeCenter[1] - leftEyeCenter[1]
        dX = rightEyeCenter[0] - leftEyeCenter[0]
        angle = np.degrees(np.arctan2(dY, dX))

        # Scale = desired inter-eye distance (in output pixels) divided by
        # the current inter-eye distance (in input pixels).
        desiredRightEyeX = 1.0 - self.desiredLeftEye[0]
        dist = np.sqrt(dX ** 2 + dY ** 2)
        scale = (desiredRightEyeX - self.desiredLeftEye[0]) * self.desiredFaceWidth / dist

        # Midpoint between the eyes: pivot for rotation and scaling.
        eyesCenter = ((leftEyeCenter[0] + rightEyeCenter[0]) // 2,
                      (leftEyeCenter[1] + rightEyeCenter[1]) // 2)

        M = cv2.getRotationMatrix2D(eyesCenter, angle, scale)
        # Shift so the eye midpoint maps onto the desired eye position.
        M[0, 2] += self.desiredFaceWidth * 0.5 - eyesCenter[0]
        M[1, 2] += self.desiredFaceHeight * self.desiredLeftEye[1] - eyesCenter[1]

        return cv2.warpAffine(
            image, M,
            (self.desiredFaceWidth, self.desiredFaceHeight),
            flags=cv2.INTER_CUBIC,
        )
class MyFaceAligner_RAF:
    """Face aligner for RAF-DB style annotations.

    Unlike ``MyFaceAligner``, ``shape`` here is indexed as ``shape[:, 0]``
    and ``shape[:, 1]`` -- the first two columns are taken directly as the
    left and right eye centres rather than 68 facial landmarks.
    """

    def __init__(self, desiredLeftEye=(0.35, 0.35),
                 desiredFaceWidth=256, desiredFaceHeight=None):
        # Target output geometry; height defaults to width (square crop).
        self.desiredLeftEye = desiredLeftEye
        self.desiredFaceWidth = desiredFaceWidth
        self.desiredFaceHeight = (
            desiredFaceWidth if desiredFaceHeight is None else desiredFaceHeight
        )

    def align(self, image, shape):
        """Rotate/scale/translate ``image`` so the eyes land at canonical positions."""
        # Eye centres come straight from the first two landmark columns.
        leftEyeCenter = shape[:, 0].astype("int")
        rightEyeCenter = shape[:, 1].astype("int")

        # Angle of the inter-eye line; rotating by it levels the eyes.
        dY = rightEyeCenter[1] - leftEyeCenter[1]
        dX = rightEyeCenter[0] - leftEyeCenter[0]
        angle = np.degrees(np.arctan2(dY, dX))

        # Scale maps the current inter-eye distance onto the desired one.
        desiredRightEyeX = 1.0 - self.desiredLeftEye[0]
        dist = np.sqrt(dX ** 2 + dY ** 2)
        scale = (desiredRightEyeX - self.desiredLeftEye[0]) * self.desiredFaceWidth / dist

        # Midpoint between the eyes: pivot for rotation and scaling.
        eyesCenter = ((leftEyeCenter[0] + rightEyeCenter[0]) // 2,
                      (leftEyeCenter[1] + rightEyeCenter[1]) // 2)

        M = cv2.getRotationMatrix2D(eyesCenter, angle, scale)
        # Shift so the eye midpoint maps onto the desired eye position.
        M[0, 2] += self.desiredFaceWidth * 0.5 - eyesCenter[0]
        M[1, 2] += self.desiredFaceHeight * self.desiredLeftEye[1] - eyesCenter[1]

        return cv2.warpAffine(
            image, M,
            (self.desiredFaceWidth, self.desiredFaceHeight),
            flags=cv2.INTER_CUBIC,
        )
#
# detector = dlib.get_frontal_face_detector()
# predictor = dlib.shape_predictor("/scratch_net/biwidl213/emotion/shape_predictor_68_face_landmarks.dat")
#
# image = cv2.imread('img.jpg')
# fa = MyFaceAligner(desiredLeftEye=(0.3, 0.3), desiredFaceWidth=256)
#
# faceAligned = fa.align(image, shape)
# cv2.imwrite("Aligned.jpg", faceAligned) | 40.233129 | 106 | 0.63068 | 772 | 6,558 | 5.305699 | 0.21114 | 0.043945 | 0.027832 | 0.020508 | 0.89209 | 0.885742 | 0.885742 | 0.885742 | 0.885742 | 0.885742 | 0 | 0.028691 | 0.271882 | 6,558 | 163 | 107 | 40.233129 | 0.82911 | 0.420098 | 0 | 0.818182 | 0 | 0 | 0.00321 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.060606 | false | 0 | 0.030303 | 0 | 0.151515 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
80d0d276f4738d4a0f24b22d77965e011c8911b1 | 113 | py | Python | MODULE/demo.py | ragulkesavan/Python-75-Hackathon | adda961155467a428938fc52e2d761bf5b786788 | [
"MIT"
] | null | null | null | MODULE/demo.py | ragulkesavan/Python-75-Hackathon | adda961155467a428938fc52e2d761bf5b786788 | [
"MIT"
] | null | null | null | MODULE/demo.py | ragulkesavan/Python-75-Hackathon | adda961155467a428938fc52e2d761bf5b786788 | [
"MIT"
] | null | null | null | import sale_module
# Demo: exercise the sale module -- fetch three customers, then a vendor.
for _ in range(3):
    sale_module.get_cust()
sale_module.get_vendor()
| 18.833333 | 24 | 0.840708 | 19 | 113 | 4.526316 | 0.315789 | 0.581395 | 0.604651 | 0.593023 | 0.744186 | 0.744186 | 0.744186 | 0.744186 | 0.744186 | 0.744186 | 0 | 0 | 0.053097 | 113 | 5 | 25 | 22.6 | 0.803738 | 0 | 0 | 0.6 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.2 | 0 | 0.2 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
80e32227c54763a77924e5476229751c9c82d03b | 106 | py | Python | simplebeep/__init__.py | Leberwurscht/simplebeep | e400b08d974828bb806b5fa30ace856fec849ee4 | [
"MIT"
] | null | null | null | simplebeep/__init__.py | Leberwurscht/simplebeep | e400b08d974828bb806b5fa30ace856fec849ee4 | [
"MIT"
] | null | null | null | simplebeep/__init__.py | Leberwurscht/simplebeep | e400b08d974828bb806b5fa30ace856fec849ee4 | [
"MIT"
] | null | null | null | from .functions import sawtooth, triangle, play_ndarray_simpleaudio, play_ndarray_gst, play_ndarray, beep
| 53 | 105 | 0.858491 | 14 | 106 | 6.142857 | 0.714286 | 0.383721 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.084906 | 106 | 1 | 106 | 106 | 0.886598 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
80fd7c63c77db2d757d225c636a743df256ef5bb | 6,132 | py | Python | cfn-macros/ECRExpander/test.py | cuddlekube/infrastructure | 18f069d1b185bc40f388744f8604b115e444c3b6 | [
"MIT"
] | 4 | 2019-01-17T20:00:54.000Z | 2020-12-23T21:27:38.000Z | cfn-macros/ECRExpander/test.py | cuddlekube/infrastructure | 18f069d1b185bc40f388744f8604b115e444c3b6 | [
"MIT"
] | null | null | null | cfn-macros/ECRExpander/test.py | cuddlekube/infrastructure | 18f069d1b185bc40f388744f8604b115e444c3b6 | [
"MIT"
] | 5 | 2019-08-05T23:00:50.000Z | 2021-11-12T16:39:03.000Z | import macro
import json
import unittest
class TestStringMethods(unittest.TestCase):
    """Tests for the ECRExpander CloudFormation macro (``macro.handler``).

    Each test feeds a template fragment through the macro and checks the
    expanded ``Resources``/``Outputs`` sections.  The event/fragment
    construction and sort-and-compare boilerplate the original repeated in
    every test is factored into private helpers below.
    """

    @staticmethod
    def _ecr_repos(**properties):
        """Build the custom ``IgnoreMe::ECR::Repository`` resource the macro expands."""
        return {"Repos": {"Type": "IgnoreMe::ECR::Repository",
                          "Properties": properties}}

    def _expand(self, resources):
        """Run the macro over a fragment containing *resources*; return the transformed fragment."""
        event = {
            "region": "ap-southeast-2",
            "requestId": "testRequest",
            "fragment": {"Resources": resources},
        }
        return macro.handler(event, None)["fragment"]

    def _assertKeys(self, mapping, expected):
        """Assert that *mapping* has exactly the keys *expected*, in any order."""
        self.assertEqual(sorted(expected), sorted(mapping.keys()))

    def _assertRepo(self, fragment, logical_id, repo_name):
        """Assert that *logical_id* is an ECR repository resource named *repo_name*."""
        resource = fragment["Resources"][logical_id]
        self.assertEqual(resource["Type"], "AWS::ECR::Repository")
        self.assertEqual(resource["Properties"]["RepositoryName"], repo_name)

    def testNonEcrPassedThrough(self):
        # Resources of other types must be left untouched by the macro.
        fragment = self._expand({"S3Bucket": {"Type": "AWS::S3::Bucket"}})
        self._assertKeys(fragment["Resources"], ["S3Bucket"])
        self.assertEqual(fragment["Resources"]["S3Bucket"]["Type"], "AWS::S3::Bucket")

    def testSingleRepoCreatesResource(self):
        # "test_repo" becomes a camel-cased "testRepo" logical resource.
        fragment = self._expand(self._ecr_repos(Repositories=["test_repo"]))
        self._assertKeys(fragment["Resources"], ["testRepo"])
        self._assertRepo(fragment, "testRepo", "test_repo")

    def testMultiReposCreateResources(self):
        fragment = self._expand(
            self._ecr_repos(Repositories=["test_repo", "second-repo"]))
        self._assertKeys(fragment["Resources"], ["testRepo", "secondRepo"])
        self._assertRepo(fragment, "testRepo", "test_repo")
        self._assertRepo(fragment, "secondRepo", "second-repo")

    def testSingleRepoCreatesOutput(self):
        # Each generated repository also gets a matching stack output.
        fragment = self._expand(self._ecr_repos(Repositories=["test_repo"]))
        self._assertKeys(fragment["Outputs"], ["testRepoOutput"])
        self._assertRepo(fragment, "testRepo", "test_repo")

    def testTagsAreCarriedOver(self):
        tags = [{"Key": "Environment", "Value": "Test"}]
        fragment = self._expand(
            self._ecr_repos(Repositories=["test_repo"], Tags=tags))
        self._assertKeys(fragment["Resources"], ["testRepo"])
        self.assertEqual(fragment["Resources"]["testRepo"]["Type"],
                         "AWS::ECR::Repository")
        self.assertEqual(fragment["Resources"]["testRepo"]["Properties"]["Tags"],
                         tags)

    def testLifecyclePoliciesAreCarriedOver(self):
        # The original reused a tag list as a stand-in lifecycle-policy
        # payload; the macro should copy the value through verbatim.
        policy = [{"Key": "Environment", "Value": "Test"}]
        fragment = self._expand(
            self._ecr_repos(Repositories=["test_repo"], LifecyclePolicy=policy))
        self._assertKeys(fragment["Resources"], ["testRepo"])
        self.assertEqual(fragment["Resources"]["testRepo"]["Type"],
                         "AWS::ECR::Repository")
        self.assertEqual(
            fragment["Resources"]["testRepo"]["Properties"]["LifecyclePolicy"],
            policy)

    def testPolicyTextsAreCarriedOver(self):
        policy = [{"Key": "Environment", "Value": "Test"}]
        fragment = self._expand(
            self._ecr_repos(Repositories=["test_repo"],
                            RepositoryPolicyText=policy))
        self._assertKeys(fragment["Resources"], ["testRepo"])
        self.assertEqual(fragment["Resources"]["testRepo"]["Type"],
                         "AWS::ECR::Repository")
        self.assertEqual(
            fragment["Resources"]["testRepo"]["Properties"]["RepositoryPolicyText"],
            policy)
# Run the suite when this file is executed directly (e.g. `python test.py`).
if __name__ == '__main__':
    unittest.main()
| 48.666667 | 127 | 0.625897 | 538 | 6,132 | 7.024164 | 0.115242 | 0.125959 | 0.091294 | 0.127018 | 0.874835 | 0.861074 | 0.861074 | 0.8362 | 0.82244 | 0.82244 | 0 | 0.002448 | 0.20075 | 6,132 | 125 | 128 | 49.056 | 0.768619 | 0 | 0 | 0.741379 | 0 | 0 | 0.290444 | 0.024462 | 0 | 0 | 0 | 0 | 0.189655 | 1 | 0.060345 | false | 0.008621 | 0.025862 | 0 | 0.094828 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
209356cb7c683658b62d31c5794d666b89a56f11 | 37,705 | py | Python | src/uproot/models/TGraph.py | kkothari2001/uproot4 | e184ff5ca75d95953e94e4c2e7928d725b46de7e | [
"BSD-3-Clause"
] | null | null | null | src/uproot/models/TGraph.py | kkothari2001/uproot4 | e184ff5ca75d95953e94e4c2e7928d725b46de7e | [
"BSD-3-Clause"
] | null | null | null | src/uproot/models/TGraph.py | kkothari2001/uproot4 | e184ff5ca75d95953e94e4c2e7928d725b46de7e | [
"BSD-3-Clause"
] | null | null | null | # BSD 3-Clause License; see https://github.com/scikit-hep/uproot4/blob/main/LICENSE
"""
This module defines versioned models for ``TLeaf`` and its subclasses.
"""
import struct
import numpy
import uproot
import uproot._util
import uproot.behaviors.TGraph
import uproot.behaviors.TGraphAsymmErrors
import uproot.behaviors.TGraphErrors
import uproot.deserialization
import uproot.model
import uproot.models.TH
import uproot.serialization
_rawstreamer_TGraph_v4 = (
None,
b'@\x00\x06s\xff\xff\xff\xffTStreamerInfo\x00@\x00\x06]\x00\t@\x00\x00\x14\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x06TGraph\x00\x05\xf7\xf4e\x00\x00\x00\x04@\x00\x067\xff\xff\xff\xffTObjArray\x00@\x00\x06%\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x0b\x00\x00\x00\x00@\x00\x00\x8d\xff\xff\xff\xffTStreamerBase\x00@\x00\x00w\x00\x03@\x00\x00m\x00\x04@\x00\x00>\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06TNamed*The basis for a named object (name, title)\x00\x00\x00C\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xdf\xb7J<\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x01@\x00\x00t\xff\xff\xff\xffTStreamerBase\x00@\x00\x00^\x00\x03@\x00\x00T\x00\x04@\x00\x00%\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08TAttLine\x0fLine attributes\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x94\x07EI\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00y\xff\xff\xff\xffTStreamerBase\x00@\x00\x00c\x00\x03@\x00\x00Y\x00\x04@\x00\x00*\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08TAttFill\x14Fill area attributes\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xd9*\x92\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00x\xff\xff\xff\xffTStreamerBase\x00@\x00\x00b\x00\x03@\x00\x00X\x00\x04@\x00\x00)\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nTAttMarker\x11Marker attributes\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00)\x1d\x8b\xec\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x02@\x00\x00\x81\xff\xff\xff\xffTStreamerBasicType\x00@\x00\x00f\x00\x02@\x00\x00`\x00\x04@\x00\x002\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fNpoints\x1cNumber of points <= 
fMaxSize\x00\x00\x00\x06\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03int@\x00\x00\x96\xff\xff\xff\xffTStreamerBasicPointer\x00@\x00\x00x\x00\x02@\x00\x00^\x00\x04@\x00\x00,\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x02fX\x1c[fNpoints] array of X points\x00\x00\x000\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07double*\x00\x00\x00\x04\x08fNpoints\x06TGraph@\x00\x00\x96\xff\xff\xff\xffTStreamerBasicPointer\x00@\x00\x00x\x00\x02@\x00\x00^\x00\x04@\x00\x00,\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x02fY\x1c[fNpoints] array of Y points\x00\x00\x000\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07double*\x00\x00\x00\x04\x08fNpoints\x06TGraph@\x00\x00\x9a\xff\xff\xff\xffTStreamerObjectPointer\x00@\x00\x00{\x00\x02@\x00\x00u\x00\x04@\x00\x00D\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfFunctions,Pointer to list of functions (fits and user)\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06TList*@\x00\x00\x97\xff\xff\xff\xffTStreamerObjectPointer\x00@\x00\x00x\x00\x02@\x00\x00r\x00\x04@\x00\x00B\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\nfHistogram*Pointer to histogram used for drawing axis\x00\x00\x00@\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05TH1F*@\x00\x00\x8a\xff\xff\xff\xffTStreamerBasicType\x00@\x00\x00o\x00\x02@\x00\x00i\x00\x04@\x00\x008\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMinimum"Minimum value for plotting along 
y\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double@\x00\x00\x8a\xff\xff\xff\xffTStreamerBasicType\x00@\x00\x00o\x00\x02@\x00\x00i\x00\x04@\x00\x008\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x08fMaximum"Maximum value for plotting along y\x00\x00\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06double\x00',
"TGraph",
4,
)
class Model_TGraph_v4(uproot.behaviors.TGraph.TGraph, uproot.model.VersionedModel):
"""
A :doc:`uproot.model.VersionedModel` for ``TGraph`` version 4.
"""
behaviors = (uproot.behaviors.TGraph.TGraph,)
def read_members(self, chunk, cursor, context, file):
if self.is_memberwise:
raise NotImplementedError(
"memberwise serialization of {}\nin file {}".format(
type(self).__name__, self.file.file_path
)
)
self._bases.append(
file.class_named("TNamed", 1).read(
chunk,
cursor,
context,
file,
self._file,
self._parent,
concrete=self.concrete,
)
)
self._bases.append(
file.class_named("TAttLine", 2).read(
chunk,
cursor,
context,
file,
self._file,
self._parent,
concrete=self.concrete,
)
)
self._bases.append(
file.class_named("TAttFill", 2).read(
chunk,
cursor,
context,
file,
self._file,
self._parent,
concrete=self.concrete,
)
)
self._bases.append(
file.class_named("TAttMarker", 2).read(
chunk,
cursor,
context,
file,
self._file,
self._parent,
concrete=self.concrete,
)
)
self._members["fNpoints"] = cursor.field(chunk, self._format0, context)
tmp = self._dtype0
if context.get("speedbump", True):
cursor.skip(1)
self._members["fX"] = cursor.array(chunk, self.member("fNpoints"), tmp, context)
tmp = self._dtype1
if context.get("speedbump", True):
cursor.skip(1)
self._members["fY"] = cursor.array(chunk, self.member("fNpoints"), tmp, context)
self._members["fFunctions"] = uproot.deserialization.read_object_any(
chunk, cursor, context, file, self._file, self
)
self._members["fHistogram"] = uproot.deserialization.read_object_any(
chunk, cursor, context, file, self._file, self
)
self._members["fMinimum"], self._members["fMaximum"] = cursor.fields(
chunk, self._format1, context
)
def read_member_n(self, chunk, cursor, context, file, member_index):
if member_index == 0:
self._bases.append(
file.class_named("TNamed", 1).read(
chunk,
cursor,
context,
file,
self._file,
self._parent,
concrete=self.concrete,
)
)
if member_index == 1:
self._bases.append(
file.class_named("TAttLine", 2).read(
chunk,
cursor,
context,
file,
self._file,
self._parent,
concrete=self.concrete,
)
)
if member_index == 2:
self._bases.append(
file.class_named("TAttFill", 2).read(
chunk,
cursor,
context,
file,
self._file,
self._parent,
concrete=self.concrete,
)
)
if member_index == 3:
self._bases.append(
file.class_named("TAttMarker", 2).read(
chunk,
cursor,
context,
file,
self._file,
self._parent,
concrete=self.concrete,
)
)
if member_index == 4:
self._members["fNpoints"] = cursor.field(
chunk, self._format_memberwise0, context
)
if member_index == 5:
tmp = self._dtype0
if context.get("speedbump", True):
cursor.skip(1)
self._members["fX"] = cursor.array(
chunk, self.member("fNpoints"), tmp, context
)
if member_index == 6:
tmp = self._dtype1
if context.get("speedbump", True):
cursor.skip(1)
self._members["fY"] = cursor.array(
chunk, self.member("fNpoints"), tmp, context
)
if member_index == 7:
self._members["fFunctions"] = uproot.deserialization.read_object_any(
chunk, cursor, context, file, self._file, self
)
if member_index == 8:
self._members["fHistogram"] = uproot.deserialization.read_object_any(
chunk, cursor, context, file, self._file, self
)
if member_index == 9:
self._members["fMinimum"] = cursor.field(
chunk, self._format_memberwise1, context
)
if member_index == 10:
self._members["fMaximum"] = cursor.field(
chunk, self._format_memberwise2, context
)
@classmethod
def strided_interpretation(
cls, file, header=False, tobject_header=True, breadcrumbs=(), original=None
):
if cls in breadcrumbs:
raise uproot.interpretation.objects.CannotBeStrided(
"classes that can contain members of the same type cannot be strided because the depth of instances is unbounded"
)
breadcrumbs = breadcrumbs + (cls,)
members = []
if header:
members.append(("@num_bytes", numpy.dtype(">u4")))
members.append(("@instance_version", numpy.dtype(">u2")))
members.extend(
file.class_named("TNamed", 1)
.strided_interpretation(file, header, tobject_header, breadcrumbs)
.members
)
members.extend(
file.class_named("TAttLine", 2)
.strided_interpretation(file, header, tobject_header, breadcrumbs)
.members
)
members.extend(
file.class_named("TAttFill", 2)
.strided_interpretation(file, header, tobject_header, breadcrumbs)
.members
)
members.extend(
file.class_named("TAttMarker", 2)
.strided_interpretation(file, header, tobject_header, breadcrumbs)
.members
)
members.append(("fNpoints", numpy.dtype(">u4")))
raise uproot.interpretation.objects.CannotBeStrided(
"class members defined by Model_TStreamerBasicPointer of type double* in member fX of class TGraph"
)
raise uproot.interpretation.objects.CannotBeStrided(
"class members defined by Model_TStreamerBasicPointer of type double* in member fY of class TGraph"
)
raise uproot.interpretation.objects.CannotBeStrided(
"class members defined by Model_TStreamerObjectPointer of type TList* in member fFunctions of class TGraph"
)
raise uproot.interpretation.objects.CannotBeStrided(
"class members defined by Model_TStreamerObjectPointer of type TH1F* in member fHistogram of class TGraph"
)
members.append(("fMinimum", numpy.dtype(">f8")))
members.append(("fMaximum", numpy.dtype(">f8")))
return uproot.interpretation.objects.AsStridedObjects(
cls, members, original=original
)
@classmethod
def awkward_form(
cls, file, index_format="i64", header=False, tobject_header=True, breadcrumbs=()
):
from awkward.forms import ListOffsetForm, RecordForm
if cls in breadcrumbs:
raise uproot.interpretation.objects.CannotBeAwkward(
"classes that can contain members of the same type cannot be Awkward Arrays because the depth of instances is unbounded"
)
breadcrumbs = breadcrumbs + (cls,)
contents = {}
if header:
contents["@num_bytes"] = uproot._util.awkward_form(
numpy.dtype("u4"),
file,
index_format,
header,
tobject_header,
breadcrumbs,
)
contents["@instance_version"] = uproot._util.awkward_form(
numpy.dtype("u2"),
file,
index_format,
header,
tobject_header,
breadcrumbs,
)
contents.update(
file.class_named("TNamed", 1)
.awkward_form(file, index_format, header, tobject_header, breadcrumbs)
.contents
)
contents.update(
file.class_named("TAttLine", 2)
.awkward_form(file, index_format, header, tobject_header, breadcrumbs)
.contents
)
contents.update(
file.class_named("TAttFill", 2)
.awkward_form(file, index_format, header, tobject_header, breadcrumbs)
.contents
)
contents.update(
file.class_named("TAttMarker", 2)
.awkward_form(file, index_format, header, tobject_header, breadcrumbs)
.contents
)
contents["fNpoints"] = uproot._util.awkward_form(
numpy.dtype(">u4"), file, index_format, header, tobject_header, breadcrumbs
)
contents["fX"] = ListOffsetForm(
index_format,
uproot._util.awkward_form(
cls._dtype0, file, index_format, header, tobject_header, breadcrumbs
),
parameters={
"uproot": {"as": "TStreamerBasicPointer", "count_name": "fNpoints"}
},
)
contents["fY"] = ListOffsetForm(
index_format,
uproot._util.awkward_form(
cls._dtype1, file, index_format, header, tobject_header, breadcrumbs
),
parameters={
"uproot": {"as": "TStreamerBasicPointer", "count_name": "fNpoints"}
},
)
contents["fMinimum"] = uproot._util.awkward_form(
numpy.dtype(">f8"), file, index_format, header, tobject_header, breadcrumbs
)
contents["fMaximum"] = uproot._util.awkward_form(
numpy.dtype(">f8"), file, index_format, header, tobject_header, breadcrumbs
)
return RecordForm(contents, parameters={"__record__": "TGraph"})
_format0 = struct.Struct(">I")
_format1 = struct.Struct(">dd")
_format_memberwise0 = struct.Struct(">I")
_format_memberwise1 = struct.Struct(">d")
_format_memberwise2 = struct.Struct(">d")
_dtype0 = numpy.dtype(">f8")
_dtype1 = numpy.dtype(">f8")
base_names_versions = [
("TNamed", 1),
("TAttLine", 2),
("TAttFill", 2),
("TAttMarker", 2),
]
member_names = [
"fNpoints",
"fX",
"fY",
"fFunctions",
"fHistogram",
"fMinimum",
"fMaximum",
]
class_flags = {"has_read_object_any": True}
class_rawstreamers = (
uproot.models.TH._rawstreamer_THashList_v0,
uproot.models.TH._rawstreamer_TAttAxis_v4,
uproot.models.TH._rawstreamer_TAxis_v10,
uproot.models.TH._rawstreamer_TH1_v8,
uproot.models.TH._rawstreamer_TH1F_v3,
uproot.models.TH._rawstreamer_TCollection_v3,
uproot.models.TH._rawstreamer_TSeqCollection_v0,
uproot.models.TH._rawstreamer_TList_v5,
uproot.models.TH._rawstreamer_TAttMarker_v2,
uproot.models.TH._rawstreamer_TAttFill_v2,
uproot.models.TH._rawstreamer_TAttLine_v2,
uproot.models.TH._rawstreamer_TString_v2,
uproot.models.TH._rawstreamer_TObject_v1,
uproot.models.TH._rawstreamer_TNamed_v1,
_rawstreamer_TGraph_v4,
)
writable = True
def _serialize(self, out, header, name, tobject_flags):
where = len(out)
for x in self._bases:
x._serialize(out, True, name, tobject_flags)
raise NotImplementedError("FIXME")
if header:
num_bytes = sum(len(x) for x in out[where:])
version = 4
out.insert(where, uproot.serialization.numbytes_version(num_bytes, version))
class Model_TGraph(uproot.model.DispatchByVersion):
    """
    A :doc:`uproot.model.DispatchByVersion` for ``TGraph``.
    """
    # Streamer version number -> concrete versioned model class used to
    # deserialize instances written with that version.
    known_versions = {4: Model_TGraph_v4}
class Model_TGraphErrors_v3(
uproot.behaviors.TGraphErrors.TGraphErrors, uproot.model.VersionedModel
):
"""
A :doc:`uproot.model.VersionedModel` for ``TGraphErrors`` version 3.
"""
behaviors = (uproot.behaviors.TGraphErrors.TGraphErrors,)
def read_members(self, chunk, cursor, context, file):
if self.is_memberwise:
raise NotImplementedError(
"memberwise serialization of {}\nin file {}".format(
type(self).__name__, self.file.file_path
)
)
self._bases.append(
file.class_named("TGraph", 4).read(
chunk,
cursor,
context,
file,
self._file,
self._parent,
concrete=self.concrete,
)
)
tmp = self._dtype0
if context.get("speedbump", True):
cursor.skip(1)
self._members["fEX"] = cursor.array(
chunk, self.member("fNpoints"), tmp, context
)
tmp = self._dtype1
if context.get("speedbump", True):
cursor.skip(1)
self._members["fEY"] = cursor.array(
chunk, self.member("fNpoints"), tmp, context
)
def read_member_n(self, chunk, cursor, context, file, member_index):
if member_index == 0:
self._bases.append(
file.class_named("TGraph", 4).read(
chunk,
cursor,
context,
file,
self._file,
self._parent,
concrete=self.concrete,
)
)
if member_index == 1:
tmp = self._dtype0
if context.get("speedbump", True):
cursor.skip(1)
self._members["fEX"] = cursor.array(
chunk, self.member("fNpoints"), tmp, context
)
if member_index == 2:
tmp = self._dtype1
if context.get("speedbump", True):
cursor.skip(1)
self._members["fEY"] = cursor.array(
chunk, self.member("fNpoints"), tmp, context
)
@classmethod
def strided_interpretation(
cls, file, header=False, tobject_header=True, breadcrumbs=(), original=None
):
if cls in breadcrumbs:
raise uproot.interpretation.objects.CannotBeStrided(
"classes that can contain members of the same type cannot be strided because the depth of instances is unbounded"
)
breadcrumbs = breadcrumbs + (cls,)
members = []
if header:
members.append(("@num_bytes", numpy.dtype(">u4")))
members.append(("@instance_version", numpy.dtype(">u2")))
members.extend(
file.class_named("TGraph", 4)
.strided_interpretation(file, header, tobject_header, breadcrumbs)
.members
)
raise uproot.interpretation.objects.CannotBeStrided(
"class members defined by Model_TStreamerBasicPointer of type double* in member fEX of class TGraphErrors"
)
raise uproot.interpretation.objects.CannotBeStrided(
"class members defined by Model_TStreamerBasicPointer of type double* in member fEY of class TGraphErrors"
)
return uproot.interpretation.objects.AsStridedObjects(
cls, members, original=original
)
@classmethod
def awkward_form(
cls, file, index_format="i64", header=False, tobject_header=True, breadcrumbs=()
):
from awkward.forms import ListOffsetForm, RecordForm
if cls in breadcrumbs:
raise uproot.interpretation.objects.CannotBeAwkward(
"classes that can contain members of the same type cannot be Awkward Arrays because the depth of instances is unbounded"
)
breadcrumbs = breadcrumbs + (cls,)
contents = {}
if header:
contents["@num_bytes"] = uproot._util.awkward_form(
numpy.dtype("u4"),
file,
index_format,
header,
tobject_header,
breadcrumbs,
)
contents["@instance_version"] = uproot._util.awkward_form(
numpy.dtype("u2"),
file,
index_format,
header,
tobject_header,
breadcrumbs,
)
contents.update(
file.class_named("TGraph", 4)
.awkward_form(file, index_format, header, tobject_header, breadcrumbs)
.contents
)
contents["fEX"] = ListOffsetForm(
index_format,
uproot._util.awkward_form(
cls._dtype0, file, index_format, header, tobject_header, breadcrumbs
),
parameters={
"uproot": {"as": "TStreamerBasicPointer", "count_name": "fNpoints"}
},
)
contents["fEY"] = ListOffsetForm(
index_format,
uproot._util.awkward_form(
cls._dtype1, file, index_format, header, tobject_header, breadcrumbs
),
parameters={
"uproot": {"as": "TStreamerBasicPointer", "count_name": "fNpoints"}
},
)
return RecordForm(contents, parameters={"__record__": "TGraphErrors"})
_dtype0 = numpy.dtype(">f8")
_dtype1 = numpy.dtype(">f8")
base_names_versions = [("TGraph", 4)]
member_names = ["fEX", "fEY"]
class_flags = {}
class_rawstreamers = (
uproot.models.TH._rawstreamer_THashList_v0,
uproot.models.TH._rawstreamer_TAttAxis_v4,
uproot.models.TH._rawstreamer_TAxis_v10,
uproot.models.TH._rawstreamer_TH1_v8,
uproot.models.TH._rawstreamer_TH1F_v3,
uproot.models.TH._rawstreamer_TCollection_v3,
uproot.models.TH._rawstreamer_TSeqCollection_v0,
uproot.models.TH._rawstreamer_TList_v5,
uproot.models.TH._rawstreamer_TAttMarker_v2,
uproot.models.TH._rawstreamer_TAttFill_v2,
uproot.models.TH._rawstreamer_TAttLine_v2,
uproot.models.TH._rawstreamer_TString_v2,
uproot.models.TH._rawstreamer_TObject_v1,
uproot.models.TH._rawstreamer_TNamed_v1,
_rawstreamer_TGraph_v4,
(
None,
b"@\x00\x02\x1a\xff\xff\xff\xffTStreamerInfo\x00@\x00\x02\x04\x00\t@\x00\x00\x1a\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x0cTGraphErrors\x00*|\xe3\x0f\x00\x00\x00\x03@\x00\x01\xd8\xff\xff\xff\xffTObjArray\x00@\x00\x01\xc6\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00@\x00\x00w\xff\xff\xff\xffTStreamerBase\x00@\x00\x00a\x00\x03@\x00\x00W\x00\x04@\x00\x00(\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06TGraph\x14Graph graphics class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05\xf7\xf4e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x04@\x00\x00\x97\xff\xff\xff\xffTStreamerBasicPointer\x00@\x00\x00y\x00\x02@\x00\x00_\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03fEX\x1c[fNpoints] array of X errors\x00\x00\x000\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07double*\x00\x00\x00\x04\x08fNpoints\x06TGraph@\x00\x00\x97\xff\xff\xff\xffTStreamerBasicPointer\x00@\x00\x00y\x00\x02@\x00\x00_\x00\x04@\x00\x00-\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x03fEY\x1c[fNpoints] array of Y errors\x00\x00\x000\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07double*\x00\x00\x00\x04\x08fNpoints\x06TGraph\x00",
"TGraphErrors",
3,
),
)
writable = True
def _serialize(self, out, header, name, tobject_flags):
where = len(out)
for x in self._bases:
x._serialize(out, True, name, tobject_flags)
raise NotImplementedError("FIXME")
if header:
num_bytes = sum(len(x) for x in out[where:])
version = 3
out.insert(where, uproot.serialization.numbytes_version(num_bytes, version))
class Model_TGraphErrors(uproot.model.DispatchByVersion):
"""
A :doc:`uproot.model.DispatchByVersion` for ``TGraphErrors``.
"""
known_versions = {3: Model_TGraphErrors_v3}
class Model_TGraphAsymmErrors_v3(
uproot.behaviors.TGraphAsymmErrors.TGraphAsymmErrors, uproot.model.VersionedModel
):
"""
A :doc:`uproot.model.VersionedModel` for ``TGraphAsymmErrors`` version 3.
"""
behaviors = (uproot.behaviors.TGraphAsymmErrors.TGraphAsymmErrors,)
def read_members(self, chunk, cursor, context, file):
if self.is_memberwise:
raise NotImplementedError(
"memberwise serialization of {}\nin file {}".format(
type(self).__name__, self.file.file_path
)
)
self._bases.append(
file.class_named("TGraph", 4).read(
chunk,
cursor,
context,
file,
self._file,
self._parent,
concrete=self.concrete,
)
)
tmp = self._dtype0
if context.get("speedbump", True):
cursor.skip(1)
self._members["fEXlow"] = cursor.array(
chunk, self.member("fNpoints"), tmp, context
)
tmp = self._dtype1
if context.get("speedbump", True):
cursor.skip(1)
self._members["fEXhigh"] = cursor.array(
chunk, self.member("fNpoints"), tmp, context
)
tmp = self._dtype2
if context.get("speedbump", True):
cursor.skip(1)
self._members["fEYlow"] = cursor.array(
chunk, self.member("fNpoints"), tmp, context
)
tmp = self._dtype3
if context.get("speedbump", True):
cursor.skip(1)
self._members["fEYhigh"] = cursor.array(
chunk, self.member("fNpoints"), tmp, context
)
def read_member_n(self, chunk, cursor, context, file, member_index):
if member_index == 0:
self._bases.append(
file.class_named("TGraph", 4).read(
chunk,
cursor,
context,
file,
self._file,
self._parent,
concrete=self.concrete,
)
)
if member_index == 1:
tmp = self._dtype0
if context.get("speedbump", True):
cursor.skip(1)
self._members["fEXlow"] = cursor.array(
chunk, self.member("fNpoints"), tmp, context
)
if member_index == 2:
tmp = self._dtype1
if context.get("speedbump", True):
cursor.skip(1)
self._members["fEXhigh"] = cursor.array(
chunk, self.member("fNpoints"), tmp, context
)
if member_index == 3:
tmp = self._dtype2
if context.get("speedbump", True):
cursor.skip(1)
self._members["fEYlow"] = cursor.array(
chunk, self.member("fNpoints"), tmp, context
)
if member_index == 4:
tmp = self._dtype3
if context.get("speedbump", True):
cursor.skip(1)
self._members["fEYhigh"] = cursor.array(
chunk, self.member("fNpoints"), tmp, context
)
@classmethod
def strided_interpretation(
cls, file, header=False, tobject_header=True, breadcrumbs=(), original=None
):
if cls in breadcrumbs:
raise uproot.interpretation.objects.CannotBeStrided(
"classes that can contain members of the same type cannot be strided because the depth of instances is unbounded"
)
breadcrumbs = breadcrumbs + (cls,)
members = []
if header:
members.append(("@num_bytes", numpy.dtype(">u4")))
members.append(("@instance_version", numpy.dtype(">u2")))
members.extend(
file.class_named("TGraph", 4)
.strided_interpretation(file, header, tobject_header, breadcrumbs)
.members
)
raise uproot.interpretation.objects.CannotBeStrided(
"class members defined by Model_TStreamerBasicPointer of type double* in member fEXlow of class TGraphAsymmErrors"
)
raise uproot.interpretation.objects.CannotBeStrided(
"class members defined by Model_TStreamerBasicPointer of type double* in member fEXhigh of class TGraphAsymmErrors"
)
raise uproot.interpretation.objects.CannotBeStrided(
"class members defined by Model_TStreamerBasicPointer of type double* in member fEYlow of class TGraphAsymmErrors"
)
raise uproot.interpretation.objects.CannotBeStrided(
"class members defined by Model_TStreamerBasicPointer of type double* in member fEYhigh of class TGraphAsymmErrors"
)
return uproot.interpretation.objects.AsStridedObjects(
cls, members, original=original
)
@classmethod
def awkward_form(
cls, file, index_format="i64", header=False, tobject_header=True, breadcrumbs=()
):
from awkward.forms import ListOffsetForm, RecordForm
if cls in breadcrumbs:
raise uproot.interpretation.objects.CannotBeAwkward(
"classes that can contain members of the same type cannot be Awkward Arrays because the depth of instances is unbounded"
)
breadcrumbs = breadcrumbs + (cls,)
contents = {}
if header:
contents["@num_bytes"] = uproot._util.awkward_form(
numpy.dtype("u4"),
file,
index_format,
header,
tobject_header,
breadcrumbs,
)
contents["@instance_version"] = uproot._util.awkward_form(
numpy.dtype("u2"),
file,
index_format,
header,
tobject_header,
breadcrumbs,
)
contents.update(
file.class_named("TGraph", 4)
.awkward_form(file, index_format, header, tobject_header, breadcrumbs)
.contents
)
contents["fEXlow"] = ListOffsetForm(
index_format,
uproot._util.awkward_form(
cls._dtype0, file, index_format, header, tobject_header, breadcrumbs
),
parameters={
"uproot": {"as": "TStreamerBasicPointer", "count_name": "fNpoints"}
},
)
contents["fEXhigh"] = ListOffsetForm(
index_format,
uproot._util.awkward_form(
cls._dtype1, file, index_format, header, tobject_header, breadcrumbs
),
parameters={
"uproot": {"as": "TStreamerBasicPointer", "count_name": "fNpoints"}
},
)
contents["fEYlow"] = ListOffsetForm(
index_format,
uproot._util.awkward_form(
cls._dtype2, file, index_format, header, tobject_header, breadcrumbs
),
parameters={
"uproot": {"as": "TStreamerBasicPointer", "count_name": "fNpoints"}
},
)
contents["fEYhigh"] = ListOffsetForm(
index_format,
uproot._util.awkward_form(
cls._dtype3, file, index_format, header, tobject_header, breadcrumbs
),
parameters={
"uproot": {"as": "TStreamerBasicPointer", "count_name": "fNpoints"}
},
)
return RecordForm(contents, parameters={"__record__": "TGraphAsymmErrors"})
_dtype0 = numpy.dtype(">f8")
_dtype1 = numpy.dtype(">f8")
_dtype2 = numpy.dtype(">f8")
_dtype3 = numpy.dtype(">f8")
base_names_versions = [("TGraph", 4)]
member_names = ["fEXlow", "fEXhigh", "fEYlow", "fEYhigh"]
class_flags = {}
class_rawstreamers = (
uproot.models.TH._rawstreamer_THashList_v0,
uproot.models.TH._rawstreamer_TAttAxis_v4,
uproot.models.TH._rawstreamer_TAxis_v10,
uproot.models.TH._rawstreamer_TH1_v8,
uproot.models.TH._rawstreamer_TH1F_v3,
uproot.models.TH._rawstreamer_TCollection_v3,
uproot.models.TH._rawstreamer_TSeqCollection_v0,
uproot.models.TH._rawstreamer_TList_v5,
uproot.models.TH._rawstreamer_TAttMarker_v2,
uproot.models.TH._rawstreamer_TAttFill_v2,
uproot.models.TH._rawstreamer_TAttLine_v2,
uproot.models.TH._rawstreamer_TString_v2,
uproot.models.TH._rawstreamer_TObject_v1,
uproot.models.TH._rawstreamer_TNamed_v1,
_rawstreamer_TGraph_v4,
(
None,
b"@\x00\x03u\xff\xff\xff\xffTStreamerInfo\x00@\x00\x03_\x00\t@\x00\x00\x1f\x00\x01\x00\x01\x00\x00\x00\x00\x03\x01\x00\x00\x11TGraphAsymmErrors\x00\xccF\xaf;\x00\x00\x00\x03@\x00\x03.\xff\xff\xff\xffTObjArray\x00@\x00\x03\x1c\x00\x03\x00\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00\x00@\x00\x00w\xff\xff\xff\xffTStreamerBase\x00@\x00\x00a\x00\x03@\x00\x00W\x00\x04@\x00\x00(\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06TGraph\x14Graph graphics class\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05\xf7\xf4e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04BASE\x00\x00\x00\x04@\x00\x00\x9e\xff\xff\xff\xffTStreamerBasicPointer\x00@\x00\x00\x80\x00\x02@\x00\x00f\x00\x04@\x00\x004\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06fEXlow [fNpoints] array of X low errors\x00\x00\x000\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07double*\x00\x00\x00\x04\x08fNpoints\x06TGraph@\x00\x00\xa0\xff\xff\xff\xffTStreamerBasicPointer\x00@\x00\x00\x82\x00\x02@\x00\x00h\x00\x04@\x00\x006\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fEXhigh![fNpoints] array of X high errors\x00\x00\x000\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07double*\x00\x00\x00\x04\x08fNpoints\x06TGraph@\x00\x00\x9e\xff\xff\xff\xffTStreamerBasicPointer\x00@\x00\x00\x80\x00\x02@\x00\x00f\x00\x04@\x00\x004\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x06fEYlow [fNpoints] array of Y low errors\x00\x00\x000\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07double*\x00\x00\x00\x04\x08fNpoints\x06TGraph@\x00\x00\xa0\xff\xff\xff\xffTStreamerBasicPointer\x00@\x00\x00\x82\x00\x02@\x00\x00h\x00\x04@\x00\x006\x00\x01\x00\x01\x00\x00\x00\x00\x03\x00\x00\x00\x07fEYhigh![fNpoints] array of Y high 
errors\x00\x00\x000\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07double*\x00\x00\x00\x04\x08fNpoints\x06TGraph\x00",
"TGraphAsymmErrors",
3,
),
)
writable = True
def _serialize(self, out, header, name, tobject_flags):
where = len(out)
for x in self._bases:
x._serialize(out, True, name, tobject_flags)
raise NotImplementedError("FIXME")
if header:
num_bytes = sum(len(x) for x in out[where:])
version = 3
out.insert(where, uproot.serialization.numbytes_version(num_bytes, version))
class Model_TGraphAsymmErrors(uproot.model.DispatchByVersion):
"""
A :doc:`uproot.model.DispatchByVersion` for ``TGraphAsymmErrors``.
"""
known_versions = {3: Model_TGraphAsymmErrors_v3}
uproot.classes["TGraph"] = Model_TGraph
uproot.classes["TGraphErrors"] = Model_TGraphErrors
uproot.classes["TGraphAsymmErrors"] = Model_TGraphAsymmErrors
| 44.994033 | 4,158 | 0.602148 | 4,446 | 37,705 | 4.979982 | 0.075798 | 0.226006 | 0.267468 | 0.286708 | 0.881984 | 0.866357 | 0.843413 | 0.837586 | 0.830902 | 0.814643 | 0 | 0.112701 | 0.273545 | 37,705 | 837 | 4,159 | 45.04779 | 0.69563 | 0.014428 | 0 | 0.705882 | 0 | 0.003922 | 0.303025 | 0.211963 | 0 | 0 | 0 | 0 | 0 | 1 | 0.019608 | false | 0 | 0.018301 | 0 | 0.098039 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
20a25bdb52885040746212fd56dad888fefdc147 | 3,834 | py | Python | centinel/primitives/foctor_core/foctor_args_error.py | mikiec84/centinel | 39fc263e71e85135fa3d65513e1d417ef76388ea | [
"MIT"
] | 29 | 2015-02-12T22:39:27.000Z | 2022-01-25T13:03:18.000Z | centinel/primitives/foctor_core/foctor_args_error.py | mikiec84/centinel | 39fc263e71e85135fa3d65513e1d417ef76388ea | [
"MIT"
] | 158 | 2015-01-03T02:29:58.000Z | 2021-02-05T18:35:56.000Z | centinel/primitives/foctor_core/foctor_args_error.py | mikiec84/centinel | 39fc263e71e85135fa3d65513e1d417ef76388ea | [
"MIT"
] | 22 | 2015-02-11T05:08:49.000Z | 2022-01-25T13:03:33.000Z | __author__ = 'rishabn'
def fp_crawler_mode_error():
str_err = "Please specify a crawl mode: standard, tor, search_log, or login_log \n"
str_err += "python front-page-crawler.py <crawl-mode>"
print str_err
raise SystemExit
def fp_crawler_standard_mode_error():
str_err = "Usage for standard crawl: python front-page-crawler.py standard <site-list> <start-index> "
str_err += "<end-index> <capture-path> <display 0/1> <process-tag>"
print str_err
raise SystemExit
def fp_crawler_tor_mode_error():
str_err = "Usage for tor crawl: python front-page-crawler.py tor <site-list> <start-index> "
str_err += "<end-index> <capture-path> <display 0/1> <process-tag> <exit-ip> <tor-port>"
print str_err
raise SystemExit
def fp_crawler_search_mode_error():
str_err = "Usage for search-log crawl: python front-page-crawler.py search_log <site-list> <start-index> "
str_err += "<end-index> <capture-path> <display 0/1> <process-tag> <output-rule-log>"
print str_err
raise SystemExit
def fp_crawler_login_mode_error():
str_err = "Usage for login-log crawl: python front-page-crawler.py login_log <site-list> <start-index> "
str_err += "<end-index> <capture-path> <display 0/1> <process-tag>"
print str_err
raise SystemExit
def search_crawler_mode_error():
str_err = "Please specify a search crawl mode: generate rules (rule-gen), " \
"search from existing rules (search-tor/search-standard)"
str_err += "\npython search-crawler.py <search-crawl-mode>"
print str_err
raise SystemExit
def search_crawler_gen_rules_error():
str_err = "Usage for search-log crawl: python search-crawler.py rule-gen <site-list> <start-index> "
str_err += "<end-index> <capture-path> <display 0/1> <process-tag> <output-rule-log>"
print str_err
raise SystemExit
def search_crawler_tor_mode_error():
str_err = "Usage for search-log crawl: python search-crawler.py search-tor <rule-list> <start-index> "
str_err += "<end-index> <capture-path> <display 0/1> <process-tag> <exit-ip> <tor-port>"
print str_err
raise SystemExit
def search_crawler_standard_mode_error():
str_err = "Usage for search-log crawl: python search-crawler.py search-standard <rule-list> <start-index> "
str_err += "<end-index> <capture-path> <display 0/1> <process-tag>"
print str_err
raise SystemExit
def login_crawler_mode_error():
str_err = "Please specify a login crawl mode: generate rules (rule-gen), " \
"search from existing rules (login-tor/login-standard)"
str_err += "\npython login-crawler.py <login-crawl-mode>"
print str_err
raise SystemExit
def login_crawler_compatible_sites_error():
str_err = "Usage for login crawl: python login-crawler.py login-standard <site-list> <credentials-file> "
str_err += "<start-index> <end-index> <capture-path> <display 0/1> <process-tag>"
print str_err
raise SystemExit
def login_crawler_gen_rules_error():
str_err = "Usage for login crawl: python login-crawler.py rule-gen <credentials-file> <start-index> "
str_err += "<end-index> <capture-path> <display 0/1> <process-tag> <output-rule-log>"
print str_err
raise SystemExit
def login_crawler_standard_playback_error():
str_err = "Usage for login crawl: python login-crawler.py standard-playback <rule-list> <credentials-file> " \
"<start-index> <end-index> <capture-path> <display 0/1> <process-tag>"
print str_err
raise SystemExit
def login_crawler_tor_playback_error():
str_err = "Usage for login crawl: python login-crawler.py tor-playback <rule-list> <credentials-file> " \
"<start-index> <end-index> <capture-path> <display 0/1> <process-tag> <exit-ip> <tor-port>"
print str_err
raise SystemExit
| 36.865385 | 114 | 0.700052 | 562 | 3,834 | 4.596085 | 0.101423 | 0.092915 | 0.059621 | 0.086721 | 0.865273 | 0.855981 | 0.822687 | 0.803329 | 0.646922 | 0.646922 | 0 | 0.006958 | 0.175274 | 3,834 | 103 | 115 | 37.223301 | 0.80993 | 0 | 0 | 0.493151 | 0 | 0.30137 | 0.572286 | 0.041493 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.191781 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4548b2dfb1002b5e64cd4be83258120c4616a1c9 | 6,211 | py | Python | tests/test_SklearnTfidfVectorizerConverter.py | wenbingl/sklearn-onnx | b18cf687f3ffa5fe7f6d23e2f06f2095da622e26 | [
"MIT"
] | null | null | null | tests/test_SklearnTfidfVectorizerConverter.py | wenbingl/sklearn-onnx | b18cf687f3ffa5fe7f6d23e2f06f2095da622e26 | [
"MIT"
] | null | null | null | tests/test_SklearnTfidfVectorizerConverter.py | wenbingl/sklearn-onnx | b18cf687f3ffa5fe7f6d23e2f06f2095da622e26 | [
"MIT"
] | null | null | null | """
Tests scikit-learn's binarizer converter.
"""
import unittest
import numpy
from sklearn.feature_extraction.text import TfidfVectorizer
from skl2onnx import convert_sklearn
from skl2onnx.common.data_types import StringTensorType
from test_utils import dump_data_and_model
class TestSklearnTfidfVectorizer(unittest.TestCase):
def test_model_tfidf_vectorizer11(self):
corpus = numpy.array([
'This is the first document.',
'This document is the second document.',
'And this is the third one.',
'Is this the first document?',
]).reshape((4, 1))
vect = TfidfVectorizer(ngram_range=(1, 1), norm=None)
vect.fit(corpus.ravel())
pred = vect.transform(corpus.ravel())
model_onnx = convert_sklearn(vect, 'TfidfVectorizer',
[('input', StringTensorType([1, 1]))])
self.assertTrue(model_onnx is not None)
dump_data_and_model(corpus, vect, model_onnx, basename="SklearnTfidfVectorizer11-OneOff-SklCol",
allow_failure="StrictVersion(onnxruntime.__version__) <= StrictVersion('0.1.4')")
def test_model_tfidf_vectorizer22(self):
corpus = numpy.array([
'This is the first document.',
'This document is the second document.',
'And this is the third one.',
'Is this the first document?',
]).reshape((4, 1))
vect = TfidfVectorizer(ngram_range=(2, 2), norm=None)
vect.fit(corpus.ravel())
pred = vect.transform(corpus.ravel())
model_onnx = convert_sklearn(vect, 'TfidfVectorizer',
[('input', StringTensorType([1, 1]))])
self.assertTrue(model_onnx is not None)
dump_data_and_model(corpus, vect, model_onnx, basename="SklearnTfidfVectorizer22-OneOff-SklCol",
allow_failure="StrictVersion(onnxruntime.__version__) <= StrictVersion('0.1.4')")
def test_model_tfidf_vectorizer12(self):
corpus = numpy.array([
'AA AA',
'AA AA BB',
]).reshape((2, 1))
vect = TfidfVectorizer(ngram_range=(1, 2), norm=None)
vect.fit(corpus.ravel())
pred = vect.transform(corpus.ravel())
model_onnx = convert_sklearn(vect, 'TfidfVectorizer',
[('input', StringTensorType([1, 1]))])
self.assertTrue(model_onnx is not None)
dump_data_and_model(corpus, vect, model_onnx, basename="SklearnTfidfVectorizer22S-OneOff-SklCol",
allow_failure="StrictVersion(onnxruntime.__version__) <= StrictVersion('0.1.4')")
def test_model_tfidf_vectorizer12(self):
corpus = numpy.array([
'This is the first document.',
'This document is the second document.',
'And this is the third one.',
'Is this the first document?',
]).reshape((4, 1))
vect = TfidfVectorizer(ngram_range=(1, 2), norm=None)
vect.fit(corpus.ravel())
pred = vect.transform(corpus.ravel())
model_onnx = convert_sklearn(vect, 'TfidfVectorizer',
[('input', StringTensorType([1, 1]))])
self.assertTrue(model_onnx is not None)
dump_data_and_model(corpus, vect, model_onnx, basename="SklearnTfidfVectorizer22-OneOff-SklCol",
allow_failure="StrictVersion(onnxruntime.__version__) <= StrictVersion('0.1.4')")
def test_model_tfidf_vectorizer12_normL1(self):
corpus = numpy.array([
'This is the first document.',
'This document is the second document.',
'And this is the third one.',
'Is this the first document?',
]).reshape((4, 1))
vect = TfidfVectorizer(ngram_range=(1, 2), norm='l1')
vect.fit(corpus.ravel())
pred = vect.transform(corpus.ravel())
model_onnx = convert_sklearn(vect, 'TfidfVectorizer',
[('input', StringTensorType([1, 1]))])
self.assertTrue(model_onnx is not None)
dump_data_and_model(corpus, vect, model_onnx, basename="SklearnTfidfVectorizer22L1-OneOff-SklCol",
allow_failure="StrictVersion(onnxruntime.__version__) <= StrictVersion('0.1.4')")
def test_model_tfidf_vectorizer12_normL2(self):
corpus = numpy.array([
'This is the first document.',
'This document is the second document.',
'And this is the third one.',
'Is this the first document?',
]).reshape((4, 1))
vect = TfidfVectorizer(ngram_range=(1, 2), norm='l2')
vect.fit(corpus.ravel())
pred = vect.transform(corpus.ravel())
model_onnx = convert_sklearn(vect, 'TfidfVectorizer',
[('input', StringTensorType([1, 1]))])
self.assertTrue(model_onnx is not None)
dump_data_and_model(corpus, vect, model_onnx, basename="SklearnTfidfVectorizer22L2-OneOff-SklCol",
allow_failure="StrictVersion(onnxruntime.__version__) <= StrictVersion('0.1.4')")
def test_model_tfidf_vectorizer13(self):
corpus = numpy.array([
'This is the first document.',
'This document is the second document.',
'And this is the third one.',
'Is this the first document?',
]).reshape((4, 1))
vect = TfidfVectorizer(ngram_range=(1, 3), norm=None)
vect.fit(corpus.ravel())
pred = vect.transform(corpus.ravel())
model_onnx = convert_sklearn(vect, 'TfidfVectorizer',
[('input', StringTensorType([1, 1]))])
self.assertTrue(model_onnx is not None)
dump_data_and_model(corpus, vect, model_onnx, basename="SklearnTfidfVectorizer13-OneOff-SklCol",
allow_failure="StrictVersion(onnxruntime.__version__) <= StrictVersion('0.1.4')")
if __name__ == "__main__":
unittest.main()
| 48.905512 | 109 | 0.590243 | 654 | 6,211 | 5.408257 | 0.134557 | 0.053435 | 0.030534 | 0.036189 | 0.854962 | 0.854962 | 0.854962 | 0.854962 | 0.854962 | 0.854962 | 0 | 0.022577 | 0.293995 | 6,211 | 126 | 110 | 49.293651 | 0.784036 | 0.006601 | 0 | 0.758929 | 0 | 0 | 0.257384 | 0.112139 | 0 | 0 | 0 | 0 | 0.0625 | 1 | 0.0625 | false | 0 | 0.053571 | 0 | 0.125 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
45646b56ac49802c9fa26c17df725cb201eb5329 | 140 | py | Python | hTools2.roboFontExt/lib/Scripts/selected glyphs/layers/mask.py | gferreira/hTools2_extension | 9e5150082a0a39847c1078aac3dc38d914a44f83 | [
"BSD-3-Clause"
] | 1 | 2015-02-10T14:47:39.000Z | 2015-02-10T14:47:39.000Z | hTools2.roboFontExt/lib/Scripts/selected glyphs/layers/mask.py | gferreira/hTools2_extension | 9e5150082a0a39847c1078aac3dc38d914a44f83 | [
"BSD-3-Clause"
] | 2 | 2017-08-08T21:02:17.000Z | 2019-12-18T15:55:48.000Z | hTools2.roboFontExt/lib/Scripts/selected glyphs/layers/mask.py | gferreira/hTools2_extension | 9e5150082a0a39847c1078aac3dc38d914a44f83 | [
"BSD-3-Clause"
] | 3 | 2015-08-17T04:09:05.000Z | 2021-08-09T20:22:04.000Z | # [h] copy glyphs to mask
import hTools2.dialogs.glyphs.mask
reload(hTools2.dialogs.glyphs.mask)
hTools2.dialogs.glyphs.mask.maskDialog()
| 20 | 40 | 0.792857 | 20 | 140 | 5.55 | 0.5 | 0.378378 | 0.540541 | 0.648649 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.023438 | 0.085714 | 140 | 6 | 41 | 23.333333 | 0.84375 | 0.164286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.333333 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
45b2a6201f839fa468bdf62193baa520c61a48a5 | 204 | py | Python | geomstats/learning/__init__.py | opeltre/geomstats | 135d5bb6f19e29dd453c68399e04100a9e2c76bf | [
"MIT"
] | null | null | null | geomstats/learning/__init__.py | opeltre/geomstats | 135d5bb6f19e29dd453c68399e04100a9e2c76bf | [
"MIT"
] | null | null | null | geomstats/learning/__init__.py | opeltre/geomstats | 135d5bb6f19e29dd453c68399e04100a9e2c76bf | [
"MIT"
] | null | null | null | from ._template import TemplateClassifier
from ._template import TemplateEstimator
from ._template import TemplateTransformer
__all__ = ['TemplateEstimator', 'TemplateClassifier', 'TemplateTransformer']
| 34 | 76 | 0.843137 | 16 | 204 | 10.3125 | 0.4375 | 0.218182 | 0.327273 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.088235 | 204 | 5 | 77 | 40.8 | 0.887097 | 0 | 0 | 0 | 0 | 0 | 0.264706 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.75 | 0 | 0.75 | 0 | 1 | 0 | 1 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
45bc0dc328f9b3927c1bb468c40e0ca967ef0bca | 1,062 | py | Python | src/Athena/ressources/Athena_example/UserContext/maya/processes/testCheck.py | gpijat/Athena | 41836c7fd893e8df726942fbebda30aaddc30306 | [
"MIT"
] | null | null | null | src/Athena/ressources/Athena_example/UserContext/maya/processes/testCheck.py | gpijat/Athena | 41836c7fd893e8df726942fbebda30aaddc30306 | [
"MIT"
] | null | null | null | src/Athena/ressources/Athena_example/UserContext/maya/processes/testCheck.py | gpijat/Athena | 41836c7fd893e8df726942fbebda30aaddc30306 | [
"MIT"
] | 1 | 2020-11-06T20:11:29.000Z | 2020-11-06T20:11:29.000Z | from Athena import AtCore
class TestForSanityCheck(AtCore.Process):
"""This check is a demo to get name and docstring.
Check:
Voici le detail du check.
fix:
Voici le detail du fix.
ui:
L'ui lance tel script
features:
- numero 1
- Have a realtime check
- tamer
"""
NAME = 'TestDeProcess NAME'
def __init__(self):
pass
def check(self):
print self.NAME, 'check'
print '#'*100
class BestCheckEver(AtCore.Process):
"""This check is a demo to get name and docstring.
Check:
Voici le detail du check.
fix:
Voici le detail du fix.
ui:
L'ui lance tel script
features:
- numero 1
- Have a realtime check
- tamer
"""
def __init__(self):
pass
def check(self):
print self.NAME, 'check'
return 'BestCheckEver'
def fix(self):
print self.NAME, 'fix' | 17.7 | 54 | 0.509416 | 120 | 1,062 | 4.441667 | 0.35 | 0.052533 | 0.097561 | 0.11257 | 0.742964 | 0.742964 | 0.742964 | 0.742964 | 0.742964 | 0.742964 | 0 | 0.008078 | 0.417137 | 1,062 | 60 | 55 | 17.7 | 0.852989 | 0 | 0 | 0.5 | 0 | 0 | 0.097192 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.125 | 0.0625 | null | null | 0.25 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
45dc6f6623f5216a2ab38539db143e1ec2a4aec6 | 3,884 | py | Python | src/test/python/model/test_rta.py | 3ll3d00d/qvibe | 5f368b30d82cac3be00736d71fbd427432768f9e | [
"MIT"
] | null | null | null | src/test/python/model/test_rta.py | 3ll3d00d/qvibe | 5f368b30d82cac3be00736d71fbd427432768f9e | [
"MIT"
] | 55 | 2019-08-19T20:51:04.000Z | 2022-02-11T20:35:50.000Z | src/test/python/model/test_rta.py | 3ll3d00d/qvibe | 5f368b30d82cac3be00736d71fbd427432768f9e | [
"MIT"
] | null | null | null | import numpy as np
from model.rta import ChunkCalculator
min_nperseg = 512
stride = 25
def test_first_data_is_too_small():
cc = ChunkCalculator(min_nperseg, stride)
data = np.arange(60).reshape(3, 20).transpose()
chunks = cc.recalc('test', data)
assert chunks is None
assert 'test' not in cc.last_idx
def test_first_data_is_exactly_one_chunk():
cc = ChunkCalculator(min_nperseg, stride)
data = np.arange(min_nperseg * 3).reshape(3, min_nperseg).transpose()
chunks = cc.recalc('test', data)
assert chunks is not None
assert len(chunks) == 1
assert chunks[0][:, 0][0] == 0
assert chunks[0][:, 0][-1] == min_nperseg - 1
assert 'test' in cc.last_idx
assert cc.last_idx['test'] == min_nperseg - 1
def test_first_data_is_more_than_a_chunk():
cc = ChunkCalculator(min_nperseg, stride)
data = np.arange((min_nperseg + 20) * 3).reshape(3, min_nperseg + 20).transpose()
chunks = cc.recalc('test', data)
assert chunks is not None
assert len(chunks) == 1
assert chunks[0][:, 0][0] == 0
assert chunks[0][:, 0][-1] == min_nperseg - 1
assert 'test' in cc.last_idx
assert cc.last_idx['test'] == min_nperseg - 1
def test_first_data_is_many_chunks():
cc = ChunkCalculator(min_nperseg, stride)
data = np.arange((min_nperseg + 105) * 3).reshape(3, min_nperseg + 105).transpose()
chunks = cc.recalc('test', data)
assert chunks is not None
assert len(chunks) == 5
last = min_nperseg - 1
first = 0
for i in range(0, 5):
assert chunks[i][:, 0][0] == first
assert chunks[i][:, 0][-1] == last
first += stride
last += stride
assert 'test' in cc.last_idx
assert cc.last_idx['test'] == min_nperseg - 1 + 100
def test_next_data_is_less_than_chunk():
cc = ChunkCalculator(min_nperseg, stride)
cc.last_idx['test'] = min_nperseg - 1
data = np.arange((min_nperseg + 20) * 3).reshape(3, min_nperseg + 20).transpose()
chunks = cc.recalc('test', data)
assert chunks is None
assert 'test' in cc.last_idx
assert cc.last_idx['test'] == min_nperseg - 1
def test_next_data_fills_a_chunk():
cc = ChunkCalculator(min_nperseg, stride)
cc.last_idx['test'] = min_nperseg - 1
data = np.arange((min_nperseg + stride) * 3).reshape(3, min_nperseg + stride).transpose()
chunks = cc.recalc('test', data)
assert chunks is not None
assert len(chunks) == 1
assert chunks[0][:, 0][0] == stride
assert chunks[0][:, 0][-1] == min_nperseg - 1 + stride
assert 'test' in cc.last_idx
assert cc.last_idx['test'] == min_nperseg - 1 + stride
def test_next_data_is_between_chunks():
cc = ChunkCalculator(min_nperseg, stride)
cc.last_idx['test'] = min_nperseg - 1
data = np.arange((min_nperseg + stride + 10) * 3).reshape(3, min_nperseg + stride + 10).transpose()
chunks = cc.recalc('test', data)
assert chunks is not None
assert len(chunks) == 1
assert chunks[0][:, 0][0] == stride
assert chunks[0][:, 0][-1] == min_nperseg - 1 + stride
assert 'test' in cc.last_idx
assert cc.last_idx['test'] == min_nperseg - 1 + stride
def test_next_data_is_many_chunks():
cc = ChunkCalculator(min_nperseg, stride)
cc.last_idx['test'] = min_nperseg - 1
data = np.arange((min_nperseg + stride * 3 + 10) * 3).reshape(3, min_nperseg + stride * 3 + 10).transpose()
chunks = cc.recalc('test', data)
assert chunks is not None
assert len(chunks) == 3
assert chunks[0][:, 0][0] == stride
assert chunks[0][:, 0][-1] == min_nperseg - 1 + stride
assert chunks[1][:, 0][0] == stride * 2
assert chunks[1][:, 0][-1] == stride * 2 - 1 + min_nperseg
assert chunks[2][:, 0][0] == stride * 3
assert chunks[2][:, 0][-1] == stride * 3 - 1 + min_nperseg
assert 'test' in cc.last_idx
assert cc.last_idx['test'] == stride * 3 - 1 + min_nperseg
| 35.962963 | 111 | 0.641864 | 592 | 3,884 | 4.033784 | 0.096284 | 0.175879 | 0.071608 | 0.059883 | 0.860972 | 0.80737 | 0.79732 | 0.773869 | 0.755025 | 0.749162 | 0 | 0.043067 | 0.210865 | 3,884 | 107 | 112 | 36.299065 | 0.736052 | 0 | 0 | 0.622222 | 0 | 0 | 0.027806 | 0 | 0 | 0 | 0 | 0 | 0.5 | 1 | 0.088889 | false | 0 | 0.022222 | 0 | 0.111111 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
45debf395634cd29c341d8bc28c828e463e02972 | 243 | py | Python | scripts/automation/trex_control_plane/interactive/trex/wireless/examples/trex_path.py | timgates42/trex-core | efe94752fcb2d0734c83d4877afe92a3dbf8eccd | [
"Apache-2.0"
] | 956 | 2015-06-24T15:04:55.000Z | 2022-03-30T06:25:04.000Z | scripts/automation/trex_control_plane/interactive/trex/wireless/examples/trex_path.py | angelyouyou/trex-core | fddf78584cae285d9298ef23f9f5c8725e16911e | [
"Apache-2.0"
] | 782 | 2015-09-20T15:19:00.000Z | 2022-03-31T23:52:05.000Z | scripts/automation/trex_control_plane/interactive/trex/wireless/examples/trex_path.py | angelyouyou/trex-core | fddf78584cae285d9298ef23f9f5c8725e16911e | [
"Apache-2.0"
] | 429 | 2015-06-27T19:34:21.000Z | 2022-03-23T11:02:51.000Z | import sys, os
cur_dir = os.path.dirname(__file__)
sys.path.insert(0, os.path.join(cur_dir, os.pardir))
sys.path.insert(0, os.path.join(cur_dir, os.pardir, os.pardir))
sys.path.insert(0, os.path.join(cur_dir, os.pardir, os.pardir, os.pardir)) | 40.5 | 74 | 0.740741 | 48 | 243 | 3.583333 | 0.25 | 0.27907 | 0.186047 | 0.244186 | 0.755814 | 0.755814 | 0.755814 | 0.755814 | 0.755814 | 0.755814 | 0 | 0.013333 | 0.074074 | 243 | 6 | 74 | 40.5 | 0.751111 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.2 | 0 | 0.2 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
afddd4a19a0ec0356c78529f8aa0f321a58395ad | 59 | py | Python | trial_python_hello/__init__.py | Mitika-d/trial_python_hello | a296ff93eea482403b1a41e089abe3152852db89 | [
"MIT"
] | null | null | null | trial_python_hello/__init__.py | Mitika-d/trial_python_hello | a296ff93eea482403b1a41e089abe3152852db89 | [
"MIT"
] | null | null | null | trial_python_hello/__init__.py | Mitika-d/trial_python_hello | a296ff93eea482403b1a41e089abe3152852db89 | [
"MIT"
] | null | null | null | from trial_python_hello.trial_python_hello import say_hello | 59 | 59 | 0.932203 | 10 | 59 | 5 | 0.6 | 0.44 | 0.64 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.050847 | 59 | 1 | 59 | 59 | 0.892857 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
640eb1fae2ed319c8c4be4d9b19aea4a3484e228 | 53,104 | py | Python | venv/lib/python3.8/site-packages/redbaron/nodes.py | nimesh-p/quize-app | b8c7f7b79ef5dd33e35150b0f081155df460dde9 | [
"MIT"
] | null | null | null | venv/lib/python3.8/site-packages/redbaron/nodes.py | nimesh-p/quize-app | b8c7f7b79ef5dd33e35150b0f081155df460dde9 | [
"MIT"
] | 20 | 2021-05-03T18:02:23.000Z | 2022-03-12T12:01:04.000Z | Lib/site-packages/redbaron/nodes.py | fochoao/cpython | 3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9 | [
"bzip2-1.0.6",
"0BSD"
] | null | null | null | from __future__ import absolute_import
import re
import baron
from baron.utils import string_instance
from redbaron.base_nodes import Node, NodeList, LiteralyEvaluable, CodeBlockNode, DotProxyList, CommaProxyList, LineProxyList, IfElseBlockSiblingNode, ElseAttributeNode
from redbaron.syntax_highlight import python_html_highlight
class ArgumentGeneratorComprehensionNode(Node):
    """Generator comprehension passed directly as a call argument, e.g. ``f(x for x in y)``."""

    def _string_to_node_list(self, string, parent, on_attribute):
        # "generators" is the only attribute parsed as a node list here.
        if on_attribute != "generators":
            raise Exception("Unhandled case")
        fst = baron.parse("(x %s)" % string)[0]["generators"]
        return NodeList.from_fst(fst, parent=parent, on_attribute=on_attribute)

    def _string_to_node(self, string, parent, on_attribute):
        # "result" is the only attribute parsed as a single node here.
        if on_attribute != "result":
            raise Exception("Unhandled case")
        fst = baron.parse("(%s for x in x)" % string)[0]["result"]
        return Node.from_fst(fst, parent=parent, on_attribute=on_attribute)
class AssertNode(Node):
    """``assert`` statement node, with optional message attribute."""

    def _string_to_node(self, string, parent, on_attribute):
        # Parse a replacement string for either the asserted expression
        # ("value") or the optional assertion message ("message").
        if on_attribute == "value":
            return Node.from_fst(baron.parse("assert %s" % string)[0]["value"], parent=parent, on_attribute=on_attribute)
        elif on_attribute == "message":
            if string:
                # Ensure a space before the message when one is being set.
                self.third_formatting = [Node.from_fst({"type": "space", "value": " "}, on_attribute=on_attribute, parent=parent)]
            # NOTE(review): flattened source is ambiguous on whether this
            # return sits inside `if string:`; an empty string would make the
            # "assert plop, " template unparsable either way — confirm upstream.
            return Node.from_fst(baron.parse("assert plop, %s" % string)[0]["message"], parent=parent, on_attribute=on_attribute)
        else:
            raise Exception("Unhandled case")
class AssignmentNode(Node):
    """Assignment statement node (``a = b``, augmented ``a += b``, annotated ``a: T = b``)."""

    _other_identifiers = ["assign"]

    def __setattr__(self, key, value):
        # Normalize the "operator" attribute: "+=" is stored as "+", a bare
        # "=" (or None) as the empty string; anything longer is rejected.
        if key == "operator":
            if len(value) == 2 and value[1] == "=":
                value = value[0]
            elif len(value) == 1 and value == "=":
                value = ""
            elif value is None:
                value = ""
            elif len(value) not in (0, 1, 2):
                raise Exception("The value of the operator can only be a string of one or two char, for eg: '+', '+=', '=', ''")
        return super(AssignmentNode, self).__setattr__(key, value)

    def _string_to_node(self, string, parent, on_attribute):
        # Re-parse a replacement string through a minimal template assignment
        # and pull out the attribute of interest.
        if on_attribute == "target":
            return Node.from_fst(baron.parse("%s = a" % string)[0]["target"], parent=parent, on_attribute=on_attribute)
        elif on_attribute == "value":
            return Node.from_fst(baron.parse("a = %s" % string)[0]["value"], parent=parent, on_attribute=on_attribute)
        elif on_attribute == "annotation":
            if not string.strip():
                # Clearing the annotation also clears its surrounding spacing.
                self.annotation_first_formatting = []
                self.annotation_second_formatting = []
                return ""
            else:
                # Make sure spacing around ":" exists before setting it.
                # NOTE(review): on_attribute says "return_annotation_first_formatting"
                # here — looks like a copy-paste from DefNode; confirm intended.
                if not self.annotation_first_formatting:
                    self.annotation_first_formatting = [Node.from_fst({"type": "space", "value": " "}, on_attribute="return_annotation_first_formatting", parent=self)]
                if not self.annotation_second_formatting:
                    self.annotation_second_formatting = [Node.from_fst({"type": "space", "value": " "}, on_attribute="return_annotation_first_formatting", parent=self)]
                return Node.from_fst(baron.parse("a: %s = a" % string)[0]["annotation"], parent=parent, on_attribute=on_attribute)
        else:
            raise Exception("Unhandled case")
class AssociativeParenthesisNode(Node):
    """Parenthesized expression used only for grouping, e.g. ``(a + b)``."""

    def _string_to_node(self, string, parent, on_attribute):
        # Only the wrapped expression ("value") can be replaced from a string.
        if on_attribute != "value":
            raise Exception("Unhandled case")
        fst = baron.parse("(%s)" % string)[0]["value"]
        return Node.from_fst(fst, parent=parent, on_attribute=on_attribute)
class AtomtrailersNode(Node):
    """Chained access expression such as ``a.b(c)[d]``."""

    def _string_to_node_list(self, string, parent, on_attribute):
        # Only the trailer chain ("value") can be replaced from a string.
        if on_attribute != "value":
            raise Exception("Unhandled case")
        fst = baron.parse("(%s)" % string)[0]["value"]["value"]
        return NodeList.from_fst(fst, parent=parent, on_attribute=on_attribute)

    def __setattr__(self, key, value):
        super(AtomtrailersNode, self).__setattr__(key, value)
        # Keep "value" wrapped in a DotProxyList for dot-chain editing.
        needs_wrap = key == "value" and not isinstance(self.value, DotProxyList)
        if needs_wrap:
            setattr(self, "value", DotProxyList(self.value))
class AwaitNode(Node):
    """``await`` expression node."""

    def _string_to_node(self, string, parent, on_attribute):
        # Only the awaited expression ("value") can be replaced from a string.
        if on_attribute != "value":
            raise Exception("Unhandled case")
        fst = baron.parse("await %s" % string)[0]["value"]
        return Node.from_fst(fst, parent=parent, on_attribute=on_attribute)
class BinaryNode(Node, LiteralyEvaluable):
    """Binary literal node, e.g. ``0b101``."""

    def __setattr__(self, key, value):
        # Validate that a string assigned to "value" really parses as a
        # binary literal.  NOTE(review): uses `assert`, which is stripped
        # under `python -O` — validation silently disappears there.
        if key == "value" and isinstance(value, string_instance):
            assert baron.parse(value)[0]["type"] == "binary"
        return super(BinaryNode, self).__setattr__(key, value)
class BinaryOperatorNode(Node):
    """Binary operator expression node, e.g. ``a + b``."""

    def __setattr__(self, key, value):
        # Validate that the operator string really forms a binary operator
        # when parsed in context (assert is stripped under -O).
        if key == "value" and isinstance(value, string_instance):
            assert baron.parse("a %s b" % value)[0]["type"] == "binary_operator"
        return super(BinaryOperatorNode, self).__setattr__(key, value)

    def _string_to_node(self, string, parent, on_attribute):
        # Replace either operand by parsing it inside a template expression.
        if on_attribute == "first":
            return Node.from_fst(baron.parse("%s + b" % string)[0]["first"], parent=parent, on_attribute=on_attribute)
        elif on_attribute == "second":
            return Node.from_fst(baron.parse("bb + %s" % string)[0]["second"], parent=parent, on_attribute=on_attribute)
        else:
            raise Exception("Unhandled case")
class BinaryStringNode(Node, LiteralyEvaluable):
    # Bytes literal, e.g. b"...": no behavior beyond the base Node.
    pass
class BinaryRawStringNode(Node, LiteralyEvaluable):
    # Raw bytes literal, e.g. rb"...": no behavior beyond the base Node.
    pass
class BooleanOperatorNode(Node):
    """Boolean operator expression node, e.g. ``a and b``."""

    def __setattr__(self, key, value):
        # Validate that the operator string really forms a boolean operator
        # when parsed in context (assert is stripped under -O).
        if key == "value" and isinstance(value, string_instance):
            assert baron.parse("a %s b" % value)[0]["type"] == "boolean_operator"
        return super(BooleanOperatorNode, self).__setattr__(key, value)

    def _string_to_node(self, string, parent, on_attribute):
        # Replace either operand by parsing it inside a template expression.
        if on_attribute == "first":
            return Node.from_fst(baron.parse("%s and b" % string)[0]["first"], parent=parent, on_attribute=on_attribute)
        elif on_attribute == "second":
            return Node.from_fst(baron.parse("bb and %s" % string)[0]["second"], parent=parent, on_attribute=on_attribute)
        else:
            raise Exception("Unhandled case")
class BreakNode(Node):
    # `break` statement: no behavior beyond the base Node.
    pass
class CallNode(Node):
    """Call trailer node: the ``(...)`` part of ``f(...)``."""

    def _string_to_node_list(self, string, parent, on_attribute):
        # Only the argument list ("value") can be replaced from a string.
        if on_attribute != "value":
            raise Exception("Unhandled case")
        fst = baron.parse("a(%s)" % string)[0]["value"][1]["value"]
        return NodeList.from_fst(fst, parent=parent, on_attribute=on_attribute)

    def __setattr__(self, key, value):
        super(CallNode, self).__setattr__(key, value)
        # Keep the arguments wrapped in a CommaProxyList for list-style editing.
        needs_wrap = key == "value" and not isinstance(self.value, CommaProxyList)
        if needs_wrap:
            setattr(self, "value", CommaProxyList(self.value))
class CallArgumentNode(Node):
    """Single argument inside a call: ``f(value)`` or ``f(target=value)``."""

    def _string_to_node(self, string, parent, on_attribute):
        # An empty string clears the attribute (returns ""), otherwise the
        # string is parsed inside a template call expression.
        if on_attribute == "value":
            return Node.from_fst(baron.parse("a(%s)" % string)[0]["value"][1]["value"][0]["value"], parent=parent, on_attribute=on_attribute) if string else ""
        elif on_attribute == "target":
            return Node.from_fst(baron.parse("a(%s=b)" % string)[0]["value"][1]["value"][0]["target"], parent=parent, on_attribute=on_attribute) if string else ""
        else:
            raise Exception("Unhandled case")
class ClassNode(CodeBlockNode):
    """``class`` definition node with decorators, bases and a body."""

    # find()/search matches on the class name by default.
    _default_test_value = "name"

    def _string_to_node_list(self, string, parent, on_attribute):
        if on_attribute == "decorators":
            return self.parse_decorators(string, parent=parent, on_attribute=on_attribute)
        elif on_attribute == "inherit_from":
            # Parentheses are only rendered when there is at least one base.
            if string:
                self.parenthesis = True
            else:
                self.parenthesis = False
            return NodeList.from_fst(baron.parse("class a(%s): pass" % string)[0]["inherit_from"], parent=parent, on_attribute=on_attribute)
        else:
            # Body ("value") handling is inherited from CodeBlockNode.
            return super(ClassNode, self)._string_to_node_list(string, parent, on_attribute)

    def __setattr__(self, key, value):
        super(ClassNode, self).__setattr__(key, value)
        # Keep the bases wrapped in a CommaProxyList for list-style editing.
        if key == "inherit_from" and not isinstance(self.inherit_from, CommaProxyList):
            setattr(self, "inherit_from", CommaProxyList(self.inherit_from, on_attribute="inherit_from"))
class CommaNode(Node):
    # "," separator token: no behavior beyond the base Node.
    pass
class CommentNode(Node):
    # "#"-comment token: no behavior beyond the base Node.
    pass
class ComparisonNode(Node):
    """Comparison expression node, e.g. ``a > b``."""

    def __setattr__(self, key, value):
        # Validate that the string really forms a comparison when parsed in
        # context (assert is stripped under -O).
        if key == "value" and isinstance(value, string_instance):
            assert baron.parse("a %s b" % value)[0]["type"] == "comparison"
        return super(ComparisonNode, self).__setattr__(key, value)

    def _string_to_node(self, string, parent, on_attribute):
        # Replace either operand or the operator by parsing a template.
        if on_attribute == "first":
            return Node.from_fst(baron.parse("%s > b" % string)[0]["first"], parent=parent, on_attribute=on_attribute)
        elif on_attribute == "value":
            return Node.from_fst(baron.parse("a %s b" % string)[0]["value"], parent=parent, on_attribute=on_attribute)
        elif on_attribute == "second":
            return Node.from_fst(baron.parse("bb > %s" % string)[0]["second"], parent=parent, on_attribute=on_attribute)
        else:
            raise Exception("Unhandled case")
class ComparisonOperatorNode(Node):
    # Comparison operator token (e.g. ">", "in"): no extra behavior.
    pass
class ComplexNode(Node):
    # Complex number literal, e.g. 1j: no extra behavior.
    pass
class ComprehensionIfNode(Node):
    """``if`` filter clause inside a comprehension."""

    def _string_to_node(self, string, parent, on_attribute):
        # Only the filter condition ("value") can be replaced from a string.
        if on_attribute != "value":
            raise Exception("Unhandled case")
        fst = baron.parse("[x for x in x if %s]" % string)[0]["generators"][0]["ifs"][0]["value"]
        return Node.from_fst(fst, parent=parent, on_attribute=on_attribute)
class ComprehensionLoopNode(Node):
    """One ``for ... in ...`` clause of a comprehension, with its ``if`` filters."""

    def _string_to_node_list(self, string, parent, on_attribute):
        if on_attribute == "ifs":
            return NodeList.from_fst(baron.parse("[x for x in x %s]" % string)[0]["generators"][0]["ifs"], parent=parent, on_attribute=on_attribute)
        else:
            # BUG FIX: original delegated via super(ClassNode, self), a
            # copy-paste error that raises TypeError ("obj must be an
            # instance of ClassNode") whenever this branch is reached.
            return super(ComprehensionLoopNode, self)._string_to_node_list(string, parent, on_attribute)

    def _string_to_node(self, string, parent, on_attribute):
        # Replace the loop variable ("iterator") or the iterated expression
        # ("target") by parsing a template comprehension.
        if on_attribute == "iterator":
            return Node.from_fst(baron.parse("[x for %s in x]" % string)[0]["generators"][0]["iterator"], parent=parent, on_attribute=on_attribute)
        elif on_attribute == "target":
            return Node.from_fst(baron.parse("[x for s in %s]" % string)[0]["generators"][0]["target"], parent=parent, on_attribute=on_attribute)
        else:
            raise Exception("Unhandled case")
class ContinueNode(Node):
    # `continue` statement: no behavior beyond the base Node.
    pass
class DecoratorNode(Node):
    """Decorator node: ``@name`` with an optional call part ``@name(...)``."""

    def _string_to_node(self, string, parent, on_attribute):
        # Parse the decorator target ("value") or its call part ("call")
        # inside a template decorated function.
        if on_attribute == "value":
            return Node.from_fst(baron.parse("@%s()\ndef a(): pass" % string)[0]["decorators"][0]["value"], parent=parent, on_attribute=on_attribute)
        elif on_attribute == "call":
            # NOTE(review): when string is empty this branch falls through and
            # implicitly returns None — confirm whether clearing the call part
            # is intended to behave this way.
            if string:
                return Node.from_fst(baron.parse("@a%s\ndef a(): pass" % string)[0]["decorators"][0]["call"], parent=parent, on_attribute=on_attribute)
        else:
            raise Exception("Unhandled case")
class DefNode(CodeBlockNode):
    """``def`` function definition node (decorators, arguments, return annotation, body)."""

    _other_identifiers = ["funcdef", "funcdef_"]
    # find()/search matches on the function name by default.
    _default_test_value = "name"

    def _string_to_node(self, string, parent, on_attribute):
        if on_attribute == "return_annotation":
            if not string.strip():
                # Clearing the annotation also clears the "->" spacing.
                self.return_annotation_first_formatting = []
                self.return_annotation_second_formatting = []
                return ""
            else:
                fst = baron.parse("def a() -> %s: pass" % string)[0]["return_annotation"]
                # Make sure spacing around "->" exists before setting it.
                if not self.return_annotation_first_formatting:
                    self.return_annotation_first_formatting = [Node.from_fst({"type": "space", "value": " "}, on_attribute="return_annotation_first_formatting", parent=self)]
                if not self.return_annotation_second_formatting:
                    self.return_annotation_second_formatting = [Node.from_fst({"type": "space", "value": " "}, on_attribute="return_annotation_second_formatting", parent=self)]
                return Node.from_fst(fst, parent=parent, on_attribute=on_attribute)
        return super(DefNode, self)._string_to_node(string, parent, on_attribute)

    def _string_to_node_list(self, string, parent, on_attribute):
        if on_attribute == "arguments":
            fst = baron.parse("def a(%s): pass" % string)[0]["arguments"]
            return NodeList.from_fst(fst, parent=parent, on_attribute=on_attribute)
        elif on_attribute == "decorators":
            return self.parse_decorators(string, parent=parent, on_attribute=on_attribute)
        else:
            return super(DefNode, self)._string_to_node_list(string, parent, on_attribute)

    def __setattr__(self, key, value):
        super(DefNode, self).__setattr__(key, value)
        # Keep the arguments wrapped in a CommaProxyList for list editing.
        if key == "arguments" and not isinstance(self.arguments, CommaProxyList):
            setattr(self, "arguments", CommaProxyList(self.arguments, on_attribute="arguments"))
        # When turning the def async, guarantee a space after "async"
        # ("async" is accessed via getattr because it is a keyword).
        elif key in ("async", "async_") and getattr(self, "async") and hasattr(self, "async_formatting") and not self.async_formatting:
            self.async_formatting = [Node.from_fst({"type": "space", "value": " "}, on_attribute="return_annotation_first_formatting", parent=self)]
class DefArgumentNode(Node):
    """Single parameter in a ``def`` signature: name, optional annotation, optional default."""

    def _string_to_node(self, string, parent, on_attribute):
        # An empty string clears the attribute (returns ""), otherwise the
        # string is parsed inside a template function definition.
        if on_attribute == "value":
            return Node.from_fst(baron.parse("def a(b=%s): pass" % string)[0]["arguments"][0]["value"], parent=parent, on_attribute=on_attribute) if string else ""
        elif on_attribute == "target":
            return Node.from_fst(baron.parse("def a(%s=b): pass" % string)[0]["arguments"][0]["target"], parent=parent, on_attribute=on_attribute) if string else ""
        elif on_attribute == "annotation":
            # Make sure spacing around ":" exists before setting it.
            if not self.annotation_first_formatting:
                self.annotation_first_formatting = [Node.from_fst({"type": "space", "value": " "}, on_attribute="annotation_first_formatting", parent=self)]
            if not self.annotation_second_formatting:
                self.annotation_second_formatting = [Node.from_fst({"type": "space", "value": " "}, on_attribute="annotation_second_formatting", parent=self)]
            return Node.from_fst(baron.parse("def a(a:%s=b): pass" % string)[0]["arguments"][0]["annotation"], parent=parent, on_attribute=on_attribute) if string else ""
        else:
            raise Exception("Unhandled case")
class DelNode(Node):
    """``del`` statement node."""

    def _string_to_node(self, string, parent, on_attribute):
        # Only the deleted target ("value") can be replaced from a string.
        if on_attribute != "value":
            raise Exception("Unhandled case")
        fst = baron.parse("del %s" % string)[0]["value"]
        return Node.from_fst(fst, parent=parent, on_attribute=on_attribute)
class DictArgumentNode(Node):
    """``**kwargs``-style argument node."""

    def _string_to_node(self, string, parent, on_attribute):
        if on_attribute == "value":
            return Node.from_fst(baron.parse("a(**%s)" % string)[0]["value"][1]["value"][0]["value"], parent=parent, on_attribute=on_attribute)
        elif on_attribute == "annotation":
            # Make sure spacing around ":" exists before setting it.
            if not self.annotation_first_formatting:
                self.annotation_first_formatting = [Node.from_fst({"type": "space", "value": " "}, on_attribute="annotation_first_formatting", parent=self)]
            if not self.annotation_second_formatting:
                self.annotation_second_formatting = [Node.from_fst({"type": "space", "value": " "}, on_attribute="annotation_second_formatting", parent=self)]
            # NOTE(review): the annotation is parsed via a plain def-argument
            # template rather than a **kwargs one — presumably the FST shape
            # is identical; confirm.
            return Node.from_fst(baron.parse("def a(a:%s=b): pass" % string)[0]["arguments"][0]["annotation"], parent=parent, on_attribute=on_attribute) if string else ""
        else:
            raise Exception("Unhandled case")
class DictitemNode(Node):
    """Single ``key: value`` pair inside a dict literal."""

    def _string_to_node(self, string, parent, on_attribute):
        # Parse the replacement inside a one-item dict template and pull out
        # the matching side of the pair.
        if on_attribute == "value":
            fst = baron.parse("{a: %s}" % string)[0]["value"][0]["value"]
        elif on_attribute == "key":
            fst = baron.parse("{%s: a}" % string)[0]["value"][0]["key"]
        else:
            raise Exception("Unhandled case")
        return Node.from_fst(fst, parent=parent, on_attribute=on_attribute)
class DictNode(Node, LiteralyEvaluable):
    """Dict literal node, e.g. ``{a: b}``."""

    def _string_to_node_list(self, string, parent, on_attribute):
        # The items are parsed by wrapping the string in braces.
        parsed = baron.parse("{%s}" % string)[0]["value"]
        return NodeList.from_fst(parsed, parent=parent, on_attribute=on_attribute)

    def __setattr__(self, key, value):
        super(DictNode, self).__setattr__(key, value)
        # Keep the items wrapped in a CommaProxyList for list-style editing.
        needs_wrap = key == "value" and not isinstance(self.value, CommaProxyList)
        if needs_wrap:
            setattr(self, "value", CommaProxyList(self.value))
class DictComprehensionNode(Node):
    """Dict comprehension node, e.g. ``{k: v for k, v in x}``."""

    def _string_to_node_list(self, string, parent, on_attribute):
        # "generators" is the only attribute parsed as a node list here.
        if on_attribute != "generators":
            raise Exception("Unhandled case")
        parsed = baron.parse("{x %s}" % string)[0]["generators"]
        return NodeList.from_fst(parsed, parent=parent, on_attribute=on_attribute)

    def _string_to_node(self, string, parent, on_attribute):
        # "result" is the only attribute parsed as a single node here.
        if on_attribute != "result":
            raise Exception("Unhandled case")
        parsed = baron.parse("{%s for x in x}" % string)[0]["result"]
        return Node.from_fst(parsed, parent=parent, on_attribute=on_attribute)
class DotNode(Node):
    # "." token in attribute access chains: no extra behavior.
    pass
class DottedAsNameNode(Node):
    """``a.b.c as x`` element of an import statement."""

    def __setattr__(self, key, value):
        if key == "target":
            # The "as" target must be a valid identifier (or empty/None to
            # remove the aliasing).
            if not (re.match(r'^[a-zA-Z_]\w*$', value) or value in ("", None)):
                raise Exception("The target of a dotted as name node can only be a 'name' or an empty string or None")
            if value:
                # Guarantee spaces around the "as" keyword.
                self.first_formatting = [Node.from_fst({"type": "space", "value": " "}, on_attribute="delimiter", parent=self)]
                self.second_formatting = [Node.from_fst({"type": "space", "value": " "}, on_attribute="delimiter", parent=self)]
        super(DottedAsNameNode, self).__setattr__(key, value)
        # Keep the dotted path wrapped in a DotProxyList.
        if key == "value" and not isinstance(self.value, DotProxyList):
            setattr(self, "value", DotProxyList(self.value))

    def _string_to_node_list(self, string, parent, on_attribute):
        # Parse the dotted path by dropping it into a template import.
        if on_attribute == "value":
            fst = baron.parse("import %s" % string)[0]["value"][0]["value"]
            return NodeList.from_fst(fst, parent=parent, on_attribute=on_attribute)
        else:
            raise Exception("Unhandled case")
class DottedNameNode(Node):
    # Dotted path like a.b.c: no behavior beyond the base Node.
    pass
class ElifNode(IfElseBlockSiblingNode):
    """``elif`` clause of an if/elif/else block."""

    def _string_to_node(self, string, parent, on_attribute):
        # Only the condition ("test") can be replaced from a string; it is
        # parsed via an equivalent `if` template.
        if on_attribute != "test":
            raise Exception("Unhandled case")
        fst = baron.parse("if %s: pass" % string)[0]["value"][0]["test"]
        return Node.from_fst(fst, parent=parent, on_attribute=on_attribute)
class EllipsisNode(Node):
    # "..." literal: no behavior beyond the base Node.
    pass
class ElseNode(IfElseBlockSiblingNode):
    """``else`` clause — of an if block, a try block, or a for/while loop."""

    @property
    def next_intuitive(self):
        # "Intuitive" navigation: where control flow goes after this clause,
        # which depends on what kind of statement owns the else.
        if self.parent.type == "ifelseblock":
            return super(ElseNode, self).next_intuitive
        elif self.parent.type == "try":
            if self.parent.finally_:
                return self.parent.finally_
            else:
                return self.parent.next
        elif self.parent.type in ("for", "while"):
            return self.parent.next
        # NOTE(review): other parent types implicitly return None.

    @property
    def previous_intuitive(self):
        # Mirror of next_intuitive: what precedes this clause logically.
        if self.parent.type == "ifelseblock":
            return super(ElseNode, self).previous_intuitive
        elif self.parent.type == "try":
            # The last except clause comes right before a try's else.
            return self.parent.excepts[-1]
        elif self.parent.type in ("for", "while"):
            return self.parent
class EndlNode(Node):
    """End-of-line node (newline plus attached indentation)."""

    def __repr__(self):
        # Show the rendered newline/indent string rather than the FST dict.
        return repr(baron.dumps([self.fst()]))

    def _bytes_repr_html_(self):
        # HTML (syntax-highlighted) rendering for notebook-style display.
        return python_html_highlight(self.__repr__())
class ExceptNode(CodeBlockNode):
    """``except`` clause of a try block."""

    @property
    def next_intuitive(self):
        # Next clause in execution-order terms: the following except, then
        # the try's else, then finally, then whatever follows the try.
        next_ = self.next
        if next_:
            return next_
        if self.parent.else_:
            return self.parent.else_
        if self.parent.finally_:
            return self.parent.finally_
        if self.parent.next:
            return self.parent.next

    @property
    def previous_intuitive(self):
        # Previous except clause if any, else the try statement itself.
        previous_ = self.previous
        if previous_:
            return previous_
        return self.parent

    def __setattr__(self, key, value):
        # Setting the delimiter ("," legacy / "as") adjusts the surrounding
        # spacing so the rendered source stays valid.
        if key == "delimiter":
            if value == ",":
                self.second_formatting = []
                self.third_formatting = [Node.from_fst({"type": "space", "value": " "}, on_attribute="delimiter", parent=self)]
            elif value == "as":
                self.second_formatting = [Node.from_fst({"type": "space", "value": " "}, on_attribute="delimiter", parent=self)]
                self.third_formatting = [Node.from_fst({"type": "space", "value": " "}, on_attribute="delimiter", parent=self)]
            elif value:
                raise Exception("Delimiters of an except node can only be 'as' or ',' (without spaces around it).")
        super(ExceptNode, self).__setattr__(key, value)

    def _string_to_node(self, string, parent, on_attribute):
        if on_attribute == "exception":
            if string:
                # A space is needed between "except" and the exception.
                self.first_formatting = [Node.from_fst({"type": "space", "value": " "}, on_attribute=on_attribute, parent=parent)]
                return Node.from_fst(baron.parse("try: pass\nexcept %s: pass" % string)[0]["excepts"][0]["exception"], parent=parent, on_attribute=on_attribute)
            else:
                # Removing the exception also removes the delimiter/target.
                self.first_formatting = []
                self.delimiter = ""
                self.target = ""
                return ""
        elif on_attribute == "target":
            if not self.exception:
                raise Exception("Can't set a target to an exception node that doesn't have an exception set")
            if string:
                # Setting a target implies the "as" form with spaces around it.
                self.delimiter = "as"
                self.second_formatting = [Node.from_fst({"type": "space", "value": " "}, on_attribute=on_attribute, parent=parent)]
                self.third_formatting = [Node.from_fst({"type": "space", "value": " "}, on_attribute=on_attribute, parent=parent)]
                return Node.from_fst(baron.parse("try: pass\nexcept a as %s: pass" % string)[0]["excepts"][0]["target"], parent=parent, on_attribute=on_attribute)
            else:
                # Removing the target also removes the delimiter and spacing.
                self.delimiter = ""
                self.second_formatting = []
                self.third_formatting = []
                return ""
        else:
            raise Exception("Unhandled case")
class ExecNode(Node):
    """Python 2 ``exec`` statement node (``exec a in b, c``)."""

    def _string_to_node(self, string, parent, on_attribute):
        if on_attribute == "value":
            return Node.from_fst(baron.parse("exec %s" % string)[0]["value"], parent=parent, on_attribute=on_attribute)
        elif on_attribute == "globals":
            if string:
                # Spaces around the "in" keyword.
                self.second_formatting = [{"type": "space", "value": " "}]
                self.third_formatting = [{"type": "space", "value": " "}]
                return Node.from_fst(baron.parse("exec a in %s" % string)[0]["globals"], parent=parent, on_attribute=on_attribute)
        elif on_attribute == "locals":
            # locals only makes sense when globals is already present.
            if not self.globals:
                raise Exception("I can't set locals when globals aren't set.")
            if string:
                # Space after the comma separating globals and locals.
                self.fifth_formatting = [{"type": "space", "value": " "}]
                return Node.from_fst(baron.parse("exec a in b, %s" % string)[0]["locals"], parent=parent, on_attribute=on_attribute)
        else:
            raise Exception("Unhandled case")
class FinallyNode(CodeBlockNode):
    """``finally`` clause of a try block."""

    @property
    def next_intuitive(self):
        # After finally, control flow continues past the whole try statement.
        return self.parent.next

    @property
    def previous_intuitive(self):
        # Walk backwards through the owning try's clauses: else, then the
        # last except, then the try itself.
        owner = self.parent
        if owner.else_:
            return owner.else_
        if owner.excepts:
            return owner.excepts[-1]
        return owner

    def __setattr__(self, key, value):
        super(FinallyNode, self).__setattr__(key, value)
        # Keep the body wrapped in a LineProxyList for line-style editing.
        needs_wrap = key == "value" and not isinstance(self.value, LineProxyList)
        if needs_wrap:
            setattr(self, "value", LineProxyList(self.value, on_attribute="value"))
class ForNode(ElseAttributeNode):
    """``for`` loop node, optionally async, with an optional else clause."""

    @property
    def next_intuitive(self):
        # In execution terms the loop's else clause runs before whatever
        # follows the loop.
        if self.else_:
            return self.else_
        return self.next

    def __setattr__(self, key, value):
        super(ForNode, self).__setattr__(key, value)
        # When turning the loop async, guarantee a space after "async"
        # ("async" is accessed via getattr because it is a keyword).
        if key in ("async", "async_") and getattr(self, "async") and hasattr(self, "async_formatting") and not self.async_formatting:
            self.async_formatting = " "

    def _string_to_node(self, string, parent, on_attribute):
        # "target" is the iterated expression, "iterator" the loop variable.
        if on_attribute == "target":
            return Node.from_fst(baron.parse("for i in %s: pass" % string)[0]["target"], parent=parent, on_attribute=on_attribute)
        elif on_attribute == "iterator":
            return Node.from_fst(baron.parse("for %s in i: pass" % string)[0]["iterator"], parent=parent, on_attribute=on_attribute)
        else:
            return super(ForNode, self)._string_to_node(string, parent, on_attribute)
class FloatNode(Node, LiteralyEvaluable):
    # Float literal, e.g. 1.5: no extra behavior.
    pass
class FloatExponantNode(Node, LiteralyEvaluable):
    # Float literal in exponent notation, e.g. 1e5: no extra behavior.
    pass
class FloatExponantComplexNode(Node, LiteralyEvaluable):
    # Complex literal in exponent notation, e.g. 1e5j: no extra behavior.
    pass
class FromImportNode(Node):
    """``from x import a, b as c`` statement node."""

    def names(self):
        """Return the list of new names imported
        For example:
        RedBaron("from qsd import a, c, e as f").names() == ['a', 'c', 'f']
        """
        return [x.target if getattr(x, "target", None) else x.value
                for x in self.targets
                if not isinstance(x, (LeftParenthesisNode, RightParenthesisNode))]

    def modules(self):
        """Return the list of the targets imported
        For example (notice 'e' instead of 'f'):
        RedBaron("from qsd import a, c, e as f").modules() == ['a', 'c', 'e']
        """
        return [x.value for x in self.targets]

    def full_path_names(self):
        """Return the list of new names imported with the full module path
        For example (notice 'e' instead of 'f'):
        RedBaron("from qsd import a, c, e as f").full_path_names() == ['qsd.a', 'qsd.c', 'qsd.f']
        """
        return [self.value.dumps() + "." + (x.target if x.target else x.value)
                for x in self.targets
                if not isinstance(x, (LeftParenthesisNode, RightParenthesisNode))]

    def full_path_modules(self):
        """Return the list of the targets imported with the full module path
        For example (notice 'e' instead of 'f'):
        RedBaron("from qsd import a, c, e as f").full_path_modules() == ['qsd.a', 'qsd.c', 'qsd.e']
        """
        return [self.value.dumps() + "." + x.value
                for x in self.targets
                if not isinstance(x, (LeftParenthesisNode, RightParenthesisNode))]

    def _string_to_node_list(self, string, parent, on_attribute):
        # "targets" are the imported names, "value" the source module path.
        if on_attribute == "targets":
            fst = baron.parse("from a import %s" % string)[0]["targets"]
            return NodeList.from_fst(fst, parent=parent, on_attribute=on_attribute)
        if on_attribute == "value":
            fst = baron.parse("from %s import s" % string)[0]["value"]
            return NodeList.from_fst(fst, parent=parent, on_attribute=on_attribute)
        else:
            raise Exception("Unhandled case")

    def __setattr__(self, key, value):
        super(FromImportNode, self).__setattr__(key, value)
        # Module path edits go through a DotProxyList, imported-name edits
        # through a CommaProxyList.
        if key == "value" and not isinstance(self.value, DotProxyList):
            setattr(self, "value", DotProxyList(self.value, on_attribute="value"))
        if key == "targets" and not isinstance(self.targets, CommaProxyList):
            setattr(self, "targets", CommaProxyList(self.targets, on_attribute="targets"))
class GeneratorComprehensionNode(Node):
    """Generator expression node, e.g. ``(x for x in y)``."""

    def _string_to_node_list(self, string, parent, on_attribute):
        # "generators" is the only attribute parsed as a node list here.
        if on_attribute != "generators":
            raise Exception("Unhandled case")
        parsed = baron.parse("(x %s)" % string)[0]["generators"]
        return NodeList.from_fst(parsed, parent=parent, on_attribute=on_attribute)

    def _string_to_node(self, string, parent, on_attribute):
        # "result" is the only attribute parsed as a single node here.
        if on_attribute != "result":
            raise Exception("Unhandled case")
        parsed = baron.parse("(%s for x in x)" % string)[0]["result"]
        return Node.from_fst(parsed, parent=parent, on_attribute=on_attribute)
class GetitemNode(Node):
    """Subscript trailer node: the ``[...]`` part of ``a[...]``."""

    def _string_to_node(self, string, parent, on_attribute):
        if on_attribute == "value":
            return Node.from_fst(baron.parse("a[%s]" % string)[0]["value"][1]["value"], parent=parent, on_attribute=on_attribute)
        # Consistency fix: every sibling node class raises on an unhandled
        # attribute; the original silently fell through and returned None.
        raise Exception("Unhandled case")
class GlobalNode(Node):
    """``global`` statement node."""

    def _string_to_node_list(self, string, parent, on_attribute):
        # Only the declared names ("value") can be replaced from a string.
        if on_attribute != "value":
            raise Exception("Unhandled case")
        parsed = baron.parse("global %s" % string)[0]["value"]
        return NodeList.from_fst(parsed, parent=parent, on_attribute=on_attribute)

    def __setattr__(self, key, value):
        super(GlobalNode, self).__setattr__(key, value)
        # Keep the names wrapped in a CommaProxyList for list-style editing.
        needs_wrap = key == "value" and not isinstance(self.value, CommaProxyList)
        if needs_wrap:
            setattr(self, "value", CommaProxyList(self.value, on_attribute="value"))
class HexaNode(Node, LiteralyEvaluable):
    # Hexadecimal literal, e.g. 0xFF: no extra behavior.
    pass
class IfNode(IfElseBlockSiblingNode):
    """``if`` clause of an if/elif/else block."""

    def _string_to_node(self, string, parent, on_attribute):
        # Only the condition ("test") can be replaced from a string.
        if on_attribute != "test":
            raise Exception("Unhandled case")
        fst = baron.parse("if %s: pass" % string)[0]["value"][0]["test"]
        return Node.from_fst(fst, parent=parent, on_attribute=on_attribute)
class IfelseblockNode(Node):
    """Container node for a whole if/elif/else construct."""

    def _string_to_node_list(self, string, parent, on_attribute):
        if on_attribute != "value":
            return super(IfelseblockNode, self)._string_to_node_list(string, parent=parent, on_attribute=on_attribute)
        # Normalize trailing whitespace, then re-add the newlines baron
        # expects: two blank lines at module root, one elsewhere, when a
        # sibling follows.
        string = string.rstrip()
        string += "\n"
        if self.next and self.on_attribute == "root":
            string += "\n\n"
        elif self.next:
            string += "\n"
        # Strip the common leading indentation so the string parses as
        # top-level code, measured on the first non-blank line.
        clean_string = re.sub("^ *\n", "", string) if "\n" in string else string
        indentation = len(re.search("^ *", clean_string).group())
        if indentation:
            string = "\n".join(map(lambda x: x[indentation:], string.split("\n")))
        result = NodeList.from_fst(baron.parse(string)[0]["value"], parent=parent, on_attribute=on_attribute)
        if self.indentation:
            # Re-apply this block's own indentation to the parsed result,
            # and fix the trailing endl's indent when a sibling follows.
            result.increase_indentation(len(self.indentation))
            if self.next:
                result[-1].value.node_list[-1].indent = self.indentation
        return result
class ImportNode(Node):
    """``import a, b.c as d`` statement node."""

    def modules(self):
        """Return the imported module paths as strings."""
        return [item.value.dumps() for item in self('dotted_as_name')]

    def names(self):
        """Return the names this import introduces into the namespace."""
        result = []
        for item in self('dotted_as_name'):
            result.append(item.target if item.target else item.value.dumps())
        return result

    def _string_to_node_list(self, string, parent, on_attribute):
        # Parse the replacement by dropping it into a template import.
        parsed = baron.parse("import %s" % string)[0]["value"]
        return NodeList.from_fst(parsed, parent=parent, on_attribute=on_attribute)

    def __setattr__(self, key, value):
        super(ImportNode, self).__setattr__(key, value)
        # Keep the imported modules wrapped in a CommaProxyList.
        needs_wrap = key == "value" and not isinstance(self.value, CommaProxyList)
        if needs_wrap:
            setattr(self, "value", CommaProxyList(self.value, on_attribute="value"))
class IntNode(Node, LiteralyEvaluable):
    """Integer literal node."""

    def fst(self):
        # Rebuild the FST dict directly from the stored value.
        return {"type": "int", "value": self.value, "section": "number"}
class InterpolatedStringNode(Node, LiteralyEvaluable):
    # f-string literal; also findable as "fstring".
    _other_identifiers = ["fstring"]
class InterpolatedRawStringNode(Node, LiteralyEvaluable):
    # Raw f-string literal (rf"..."); also findable as "raw_fstring".
    _other_identifiers = ["raw_fstring"]
class KwargsOnlyMarkerNode(Node):
    # Bare "*" marker in a def signature (keyword-only arguments follow).
    pass
class LambdaNode(Node):
    """``lambda`` expression node."""

    def _string_to_node_list(self, string, parent, on_attribute):
        if on_attribute == "arguments":
            # A space after "lambda" is only needed when there are arguments.
            self.first_formatting = [{"type": "space", "value": " "}] if string else []
            fst = baron.parse("lambda %s: x" % string)[0]["arguments"]
            return NodeList.from_fst(fst, parent=parent, on_attribute=on_attribute)
        else:
            # BUG FIX: original delegated via super(DefNode, self), a
            # copy-paste error that raises TypeError ("obj must be an
            # instance of DefNode") whenever this branch is reached.
            return super(LambdaNode, self)._string_to_node_list(string, parent, on_attribute)

    def _string_to_node(self, string, parent, on_attribute):
        if on_attribute == "value":
            return Node.from_fst(baron.parse("lambda: %s" % string)[0]["value"], parent=parent, on_attribute=on_attribute)
        else:
            raise Exception("Unhandled case")

    def __setattr__(self, key, value):
        super(LambdaNode, self).__setattr__(key, value)
        # Keep the arguments wrapped in a CommaProxyList for list editing.
        if key == "arguments" and not isinstance(self.arguments, CommaProxyList):
            setattr(self, "arguments", CommaProxyList(self.arguments, on_attribute="arguments"))
class LeftParenthesisNode(Node):
    # "(" token: no behavior beyond the base Node.
    pass
class ListArgumentNode(Node):
    """``*args``-style argument node."""

    def _string_to_node(self, string, parent, on_attribute):
        if on_attribute == "value":
            return Node.from_fst(baron.parse("lambda *%s: x" % string)[0]["arguments"][0]["value"], parent=parent, on_attribute=on_attribute)
        elif on_attribute == "annotation":
            # Make sure spacing around ":" exists before setting it.
            if not self.annotation_first_formatting:
                self.annotation_first_formatting = [Node.from_fst({"type": "space", "value": " "}, on_attribute="annotation_first_formatting", parent=self)]
            if not self.annotation_second_formatting:
                self.annotation_second_formatting = [Node.from_fst({"type": "space", "value": " "}, on_attribute="annotation_second_formatting", parent=self)]
            # NOTE(review): parsed via a plain def-argument template rather
            # than a *args one — presumably the FST shape is identical; confirm.
            return Node.from_fst(baron.parse("def a(a:%s=b): pass" % string)[0]["arguments"][0]["annotation"], parent=parent, on_attribute=on_attribute) if string else ""
        else:
            raise Exception("Unhandled case")
class ListComprehensionNode(Node):
    """List comprehension node, e.g. ``[x for x in y]``."""

    def _string_to_node_list(self, string, parent, on_attribute):
        # "generators" is the only attribute parsed as a node list here.
        if on_attribute != "generators":
            raise Exception("Unhandled case")
        parsed = baron.parse("[x %s]" % string)[0]["generators"]
        return NodeList.from_fst(parsed, parent=parent, on_attribute=on_attribute)

    def _string_to_node(self, string, parent, on_attribute):
        # "result" is the only attribute parsed as a single node here.
        if on_attribute != "result":
            raise Exception("Unhandled case")
        parsed = baron.parse("[%s for x in x]" % string)[0]["result"]
        return Node.from_fst(parsed, parent=parent, on_attribute=on_attribute)
class ListNode(Node, LiteralyEvaluable):
    """List literal node, e.g. ``[a, b]``."""

    def _string_to_node_list(self, string, parent, on_attribute):
        # The items are parsed by wrapping the string in brackets.
        parsed = baron.parse("[%s]" % string)[0]["value"]
        return NodeList.from_fst(parsed, parent=parent, on_attribute=on_attribute)

    def __setattr__(self, key, value):
        super(ListNode, self).__setattr__(key, value)
        # Keep the items wrapped in a CommaProxyList for list-style editing.
        needs_wrap = key == "value" and not isinstance(self.value, CommaProxyList)
        if needs_wrap:
            setattr(self, "value", CommaProxyList(self.value))
class LongNode(Node, LiteralyEvaluable):
    # Python 2 long literal, e.g. 1L: no extra behavior.
    pass
class NameNode(Node, LiteralyEvaluable):
    # Bare identifier: no extra behavior.
    pass
class TypedNameNode(Node):
    # Identifier with a type annotation: no extra behavior.
    pass
class NameAsNameNode(Node):
    """``a as b`` element of a from-import statement."""

    def __setattr__(self, key, value):
        if key == "target":
            # The "as" target must be a valid identifier (or empty/None to
            # remove the aliasing).
            if not (re.match(r'^[a-zA-Z_]\w*$', value) or value in ("", None)):
                raise Exception("The target of a name as name node can only be a 'name' or an empty string or None")
            if value:
                # Guarantee spaces around the "as" keyword.
                self.first_formatting = [Node.from_fst({"type": "space", "value": " "}, on_attribute="delimiter", parent=self)]
                self.second_formatting = [Node.from_fst({"type": "space", "value": " "}, on_attribute="delimiter", parent=self)]
        elif key == "value":
            # The imported name itself must also be a valid identifier.
            if not (re.match(r'^[a-zA-Z_]\w*$', value) or value in ("", None)):
                raise Exception("The value of a name as name node can only be a 'name' or an empty string or None")
        return super(NameAsNameNode, self).__setattr__(key, value)
class NonlocalNode(Node):
    """``nonlocal`` statement node."""

    def _string_to_node_list(self, string, parent, on_attribute):
        if on_attribute == "value":
            # NOTE(review): parses through a "global" template — presumably
            # the resulting FST "value" shape is identical to nonlocal's;
            # confirm against baron.
            fst = baron.parse("global %s" % string)[0]["value"]
            return NodeList.from_fst(fst, parent=parent, on_attribute=on_attribute)
        else:
            raise Exception("Unhandled case")

    def __setattr__(self, key, value):
        super(NonlocalNode, self).__setattr__(key, value)
        # Keep the names wrapped in a CommaProxyList for list-style editing.
        if key == "value" and not isinstance(self.value, CommaProxyList):
            setattr(self, "value", CommaProxyList(self.value, on_attribute="value"))
class OctaNode(Node, LiteralyEvaluable):
    # Octal literal, e.g. 0o7: no extra behavior.
    pass
class PassNode(Node):
    # `pass` statement: no behavior beyond the base Node.
    pass
class PrintNode(Node):
    """Python 2 ``print`` statement node (``print >>dest, a, b``)."""

    def _string_to_node(self, string, parent, on_attribute):
        if on_attribute == "destination":
            if string and not self.value:
                # Setting a destination on an empty print: just ">>dest".
                self.formatting = [{"type": "space", "value": " "}]
                return Node.from_fst(baron.parse("print >>%s" % string)[0]["destination"], parent=parent, on_attribute=on_attribute)
            elif string and self.value:
                self.formatting = [{"type": "space", "value": " "}]
                result = Node.from_fst(baron.parse("print >>%s" % string)[0]["destination"], parent=parent, on_attribute=on_attribute)
                # A comma must separate the destination from existing values.
                if len(self.value.node_list) and not self.value.node_list[0].type == "comma":
                    self.value = NodeList([Node.from_fst({"type": "comma", "second_formatting": [{"type": "space", "value": " "}], "first_formatting": []}, parent=parent, on_attribute=on_attribute)]) + self.value
                return result
            elif self.value.node_list and self.value.node_list[0].type == "comma":
                # Removing the destination: drop the now-dangling comma.
                self.formatting = [{"type": "space", "value": " "}]
                self.value = self.value.node_list[1:]
            else:
                # Bare "print" needs no trailing space.
                self.formatting = []
        else:
            raise Exception("Unhandled case")

    def _string_to_node_list(self, string, parent, on_attribute):
        if on_attribute == "value":
            if string:
                self.formatting = [{"type": "space", "value": " "}]
                # Template depends on whether a destination is present.
                fst = baron.parse(("print %s" if not self.destination else "print >>a, %s") % string)[0]["value"]
                return NodeList.from_fst(fst, parent=parent, on_attribute=on_attribute)
            else:
                # Clearing the values: keep a space only if a destination remains.
                self.formatting = [] if not string and not self.destination else [{"type": "space", "value": " "}]
                return NodeList()
        else:
            raise Exception("Unhandled case")

    def __setattr__(self, key, value):
        super(PrintNode, self).__setattr__(key, value)
        # Keep the printed values wrapped in a CommaProxyList.
        if key == "value" and not isinstance(self.value, CommaProxyList):
            setattr(self, "value", CommaProxyList(self.value))
class RaiseNode(Node):
    """Node for a ``raise`` statement (``raise value, instance, traceback`` /
    ``raise value from instance``)."""

    def _string_to_node(self, string, parent, on_attribute):
        if on_attribute == "value":
            # Space between "raise" and the exception only when one is given.
            self.first_formatting = [{"type": "space", "value": " "}] if string else []
            if string:
                return Node.from_fst(baron.parse("raise %s" % string)[0]["value"], parent=parent, on_attribute=on_attribute)
        elif on_attribute == "instance":
            if not self.value:
                raise Exception("Can't set instance if there is not value")
            if string:
                self.third_formatting = [{"type": "space", "value": " "}]
                # Default to the comma form when no separator is set yet.
                if not self.comma_or_from:
                    self.comma_or_from = ","
                return Node.from_fst(baron.parse("raise a, %s" % string)[0]["instance"], parent=parent, on_attribute=on_attribute)
        elif on_attribute == "traceback":
            if not self.instance:
                raise Exception("Can't set traceback if there is not instance")
            if string:
                self.fifth_formatting = [{"type": "space", "value": " "}]
                return Node.from_fst(baron.parse("raise a, b, %s" % string)[0]["traceback"], parent=parent, on_attribute=on_attribute)
        else:
            raise Exception("Unhandled case")

    def __setattr__(self, key, value):
        # Remember the previous separator so re-assigning the same one is a no-op.
        current = getattr(self, "comma_or_from", None)
        super(RaiseNode, self).__setattr__(key, value)
        if key == "comma_or_from":
            if value == current:
                return
            if value == "from":
                # "raise a from b" needs spaces on both sides of "from".
                self.second_formatting = [Node.from_fst({"type": "space", "value": " "}, on_attribute="second_formatting", parent=self)]
                self.third_formatting = [Node.from_fst({"type": "space", "value": " "}, on_attribute="third_formatting", parent=self)]
            elif value == ",":
                # "raise a, b" only needs a space after the comma.
                self.second_formatting = []
                self.third_formatting = [Node.from_fst({"type": "space", "value": " "}, on_attribute="third_formatting", parent=self)]
class RawStringNode(Node, LiteralyEvaluable):
    """Node for a raw string literal (``r"..."``)."""
    pass
class RightParenthesisNode(Node):
    """Node for a ``)`` token."""
    pass
class ReprNode(Node):
    """Node for the Python 2 backtick repr syntax (`` `x` ``)."""

    def _string_to_node_list(self, string, parent, on_attribute):
        # Parse the string wrapped in backticks and keep the inner value list.
        parsed = baron.parse("`%s`" % string)
        return NodeList.from_fst(parsed[0]["value"], parent=parent, on_attribute=on_attribute)

    def __setattr__(self, key, value):
        super(ReprNode, self).__setattr__(key, value)
        # Keep "value" managed as a CommaProxyList.
        needs_proxy = key == "value" and not isinstance(self.value, CommaProxyList)
        if needs_proxy:
            setattr(self, "value", CommaProxyList(self.value))
class ReturnNode(Node):
    """Node for a ``return`` statement."""

    def _string_to_node(self, string, parent, on_attribute):
        if on_attribute != "value":
            raise Exception("Unhandled case")
        # A space separates "return" from a non-empty value.
        self.formatting = [{"type": "space", "value": " "}] if string else []
        if not string:
            return None
        fst = baron.parse("return %s" % string)[0]["value"]
        return Node.from_fst(fst, parent=parent, on_attribute=on_attribute)
class SemicolonNode(Node):
    """Node for a ``;`` token."""
    pass
class SetNode(Node):
    """Node for a set literal (``{a, b}``)."""

    def _string_to_node_list(self, string, parent, on_attribute):
        # Parse the elements inside braces and keep the inner value list.
        parsed = baron.parse("{%s}" % string)
        return NodeList.from_fst(parsed[0]["value"], parent=parent, on_attribute=on_attribute)

    def __setattr__(self, key, value):
        super(SetNode, self).__setattr__(key, value)
        if key == "value":
            # Keep "value" managed as a CommaProxyList.
            if not isinstance(self.value, CommaProxyList):
                setattr(self, "value", CommaProxyList(self.value))
class SetComprehensionNode(Node):
    """Node for a set comprehension (``{x for x in y}``)."""

    def _string_to_node_list(self, string, parent, on_attribute):
        if on_attribute != "generators":
            raise Exception("Unhandled case")
        # Parse the generators in the context of a dummy comprehension.
        fst = baron.parse("{x %s}" % string)[0]["generators"]
        return NodeList.from_fst(fst, parent=parent, on_attribute=on_attribute)

    def _string_to_node(self, string, parent, on_attribute):
        if on_attribute != "result":
            raise Exception("Unhandled case")
        # Parse the result expression in the context of a dummy comprehension.
        fst = baron.parse("{%s for x in x}" % string)[0]["result"]
        return Node.from_fst(fst, parent=parent, on_attribute=on_attribute)
class SliceNode(Node):
    """Node for a slice (``lower:upper:step``) inside a subscript."""

    def _string_to_node(self, string, parent, on_attribute):
        # Each bound is parsed inside a dummy subscript; an empty string leaves
        # the bound unset (the method then implicitly returns None).
        if on_attribute == "lower":
            if string:
                return Node.from_fst(baron.parse("a[%s:]" % string)[0]["value"][1]["value"]["lower"], parent=parent, on_attribute=on_attribute)
        elif on_attribute == "upper":
            if string:
                return Node.from_fst(baron.parse("a[:%s]" % string)[0]["value"][1]["value"]["upper"], parent=parent, on_attribute=on_attribute)
        elif on_attribute == "step":
            # The second colon is only rendered when a step is present.
            self.has_two_colons = bool(string)
            if string:
                return Node.from_fst(baron.parse("a[::%s]" % string)[0]["value"][1]["value"]["step"], parent=parent, on_attribute=on_attribute)
        else:
            raise Exception("Unhandled case")
class SpaceNode(Node):
    """Node for a whitespace token."""

    def __repr__(self):
        # Show the dumped source of this node rather than the default repr.
        dumped = baron.dumps([self.fst()])
        return repr(dumped)
class StandaloneAnnotationNode(Node):
    """Node for a standalone annotation statement."""
    pass
class StarExpressionNode(Node):
    """Node for a star expression (``*x``)."""
    pass
class StarNode(Node):
    """Node for a lone ``*`` token."""
    pass
class StringNode(Node, LiteralyEvaluable):
    """Node for a plain string literal."""
    pass
class StringChainNode(Node, LiteralyEvaluable):
    """Node for implicitly concatenated string literals (``"a" "b"``)."""

    def _string_to_node_list(self, string, parent, on_attribute):
        if on_attribute != "value":
            raise Exception("Unhandled case")
        # Parse in the context of an assignment to obtain the chained strings.
        fst = baron.parse("a = %s" % string)[0]["value"]["value"]
        return NodeList.from_fst(fst, parent=parent, on_attribute=on_attribute)
class TernaryOperatorNode(Node):
    """Node for a ternary expression (``a if b else c``)."""

    def _string_to_node(self, string, parent, on_attribute):
        # Template used to parse each attribute in a valid context.
        templates = {
            "first": "%s if b else c",
            "second": "a if b else %s",
            "value": "a if %s else s",
        }
        if on_attribute not in templates:
            raise Exception("Unhandled case")
        fst = baron.parse(templates[on_attribute] % string)[0][on_attribute]
        return Node.from_fst(fst, parent=parent, on_attribute=on_attribute)
class TryNode(ElseAttributeNode):
    """Node for a ``try`` block with its excepts/else/finally clauses."""

    @property
    def next_intuitive(self):
        # Control flow falls through to the first except, otherwise to finally.
        if self.excepts:
            return self.excepts[0]
        if self.finally_:
            return self.finally_
        raise Exception("incoherent state of TryNode, try should be followed either by except or finally")

    def _string_to_node_list(self, string, parent, on_attribute):
        if on_attribute != "excepts":
            return super(TryNode, self)._string_to_node_list(string, parent=parent, on_attribute=on_attribute)
        # Normalize the input: drop a leading blank line, then dedent every
        # line by the indentation of the first one.
        clean_string = re.sub("^ *\n", "", string) if "\n" in string else string
        indentation = len(re.search("^ *", clean_string).group())
        if indentation:
            string = "\n".join(map(lambda x: x[indentation:], string.split("\n")))
        string = string.rstrip()
        string += "\n"
        # Preserve blank-line separation expected after this node.
        if self.next and self.on_attribute == "root":
            string += "\n\n"
        elif self.next:
            string += "\n"
        # Parse the excepts in the context of a dummy try/finally.
        result = NodeList.from_fst(baron.parse("try:\n pass\n%sfinally:\n pass" % string)[0]["excepts"], parent=parent, on_attribute=on_attribute)
        if self.indentation:
            result.increase_indentation(len(self.indentation))
            if self._get_last_member_to_clean().type != "except":
                # assume that this is an endl node, this might break
                result[-1].value.node_list[-1].indent = self.indentation
            elif self.next:
                result[-1].value.node_list[-1].indent = self.indentation
        return result

    def _string_to_node(self, string, parent, on_attribute):
        if on_attribute == "finally":
            return self._convert_input_to_one_indented_member("finally", string, parent, on_attribute)
        else:
            return super(TryNode, self)._string_to_node(string, parent=parent, on_attribute=on_attribute)

    def __setattr__(self, name, value):
        # "finally" is a keyword, so the public attribute is "finally_".
        if name == "finally_":
            name = "finally"
        return super(TryNode, self).__setattr__(name, value)

    def _get_last_member_to_clean(self):
        # The last clause of the statement: finally > else > last except.
        if self.finally_:
            return self.finally_
        if self.else_:
            return self.else_
        return self.excepts[-1]

    def __getattr__(self, name):
        if name == "finally_":
            return getattr(self, "finally")
        return super(TryNode, self).__getattr__(name)
class TupleNode(Node, LiteralyEvaluable):
    """Node for a tuple literal."""

    def _string_to_node_list(self, string, parent, on_attribute):
        fst = baron.parse("(%s)" % string)[0]["value"]
        if not isinstance(fst, list):
            # A single element parses as an AssociativeParenthesisNode rather
            # than a tuple; re-parse with a trailing comma to force a tuple.
            fst = baron.parse("(%s,)" % string)[0]["value"]
        return NodeList.from_fst(fst, parent=parent, on_attribute=on_attribute)

    def __setattr__(self, key, value):
        super(TupleNode, self).__setattr__(key, value)
        # Keep "value" managed as a CommaProxyList.
        wrap = key == "value" and not isinstance(self.value, CommaProxyList)
        if wrap:
            setattr(self, "value", CommaProxyList(self.value))
class UnicodeStringNode(Node, LiteralyEvaluable):
    """Node for a unicode string literal (``u"..."``)."""
    pass
class UnicodeRawStringNode(Node, LiteralyEvaluable):
    """Node for a unicode raw string literal (``ur"..."``)."""
    pass
class UnitaryOperatorNode(Node):
    """Node for a unary operator expression (e.g. ``-a``)."""

    def _string_to_node(self, string, parent, on_attribute):
        if on_attribute != "target":
            raise Exception("Unhandled case")
        # Parse the operand in the context of a dummy unary minus.
        fst = baron.parse("-%s" % string)[0]["target"]
        return Node.from_fst(fst, parent=parent, on_attribute=on_attribute)
class YieldNode(Node):
    """Node for a ``yield`` expression."""

    def _string_to_node(self, string, parent, on_attribute):
        if on_attribute != "value":
            raise Exception("Unhandled case")
        # A space separates "yield" from a non-empty value.
        self.formatting = [{"type": "space", "value": " "}] if string else []
        if not string:
            return None
        fst = baron.parse("yield %s" % string)[0]["value"]
        return Node.from_fst(fst, parent=parent, on_attribute=on_attribute)
class YieldFromNode(Node):
    """Node for a ``yield from`` expression."""

    def _string_to_node(self, string, parent, on_attribute):
        if on_attribute != "value":
            raise Exception("Unhandled case")
        fst = baron.parse("yield from %s" % string)[0]["value"]
        return Node.from_fst(fst, parent=parent, on_attribute=on_attribute)
class YieldAtomNode(Node):
    """Node for a parenthesized yield expression (``(yield x)``)."""

    def _string_to_node(self, string, parent, on_attribute):
        if on_attribute != "value":
            raise Exception("Unhandled case")
        # A space separates "yield" from a non-empty value.
        self.second_formatting = [{"type": "space", "value": " "}] if string else []
        if not string:
            return None
        fst = baron.parse("yield %s" % string)[0]["value"]
        return Node.from_fst(fst, parent=parent, on_attribute=on_attribute)
class WhileNode(ElseAttributeNode):
    """Node for a ``while`` loop (with optional ``else`` clause)."""

    @property
    def next_intuitive(self):
        # The "intuitive" next node is the else clause when one exists.
        return self.else_ if self.else_ else self.next

    def _string_to_node(self, string, parent, on_attribute):
        if on_attribute == "test":
            fst = baron.parse("while %s: pass" % string)[0]["test"]
            return Node.from_fst(fst, parent=parent, on_attribute=on_attribute)
        return super(WhileNode, self)._string_to_node(string, parent, on_attribute)

    def __setattr__(self, key, value):
        super(WhileNode, self).__setattr__(key, value)
        if key == "value":
            # Keep the body managed as a LineProxyList.
            if not isinstance(self.value, LineProxyList):
                setattr(self, "value", LineProxyList(self.value, on_attribute="value"))
class WithContextItemNode(Node):
    """Node for one ``a as b`` item of a ``with`` statement."""

    def _string_to_node(self, string, parent, on_attribute):
        if on_attribute == "value":
            return Node.from_fst(baron.parse("with %s: pass" % string)[0]["contexts"][0]["value"], parent=parent, on_attribute=on_attribute)
        elif on_attribute == "as":
            if string:
                # Spaces around the "as" keyword.
                self.first_formatting = [{"type": "space", "value": " "}]
                self.second_formatting = [{"type": "space", "value": " "}]
                return Node.from_fst(baron.parse("with a as %s: pass" % string)[0]["contexts"][0]["as"], parent=parent, on_attribute=on_attribute)
            else:
                # Removing the "as" target also removes its surrounding spaces.
                self.first_formatting = []
                self.second_formatting = []
                return ""
        else:
            raise Exception("Unhandled case")

    def __getattr__(self, name):
        # "as" is a keyword, so the public attribute is "as_".
        if name == "as_":
            return getattr(self, "as")
        return super(WithContextItemNode, self).__getattr__(name)

    def __setattr__(self, name, value):
        if name == "as_":
            name = "as"
        return super(WithContextItemNode, self).__setattr__(name, value)
class WithNode(CodeBlockNode):
    """Node for a ``with`` statement (possibly ``async``)."""

    def _string_to_node_list(self, string, parent, on_attribute):
        if on_attribute == "contexts":
            return NodeList.from_fst(baron.parse("with %s: pass" % string)[0]["contexts"], parent=parent, on_attribute=on_attribute)
        else:
            return super(WithNode, self)._string_to_node_list(string, parent, on_attribute)

    def __setattr__(self, key, value):
        super(WithNode, self).__setattr__(key, value)
        # Keep "contexts" managed as a CommaProxyList.
        if key == "contexts" and not isinstance(self.contexts, CommaProxyList):
            setattr(self, "contexts", CommaProxyList(self.contexts, on_attribute="contexts"))
        # When the node becomes async, ensure a space after the "async" keyword
        # ("async" is read via getattr because it is a keyword).
        if key in ("async", "async_") and getattr(self, "async") and hasattr(self, "async_formatting") and not self.async_formatting:
            self.async_formatting = " "
| 37.635719 | 212 | 0.626601 | 6,345 | 53,104 | 5.034515 | 0.051852 | 0.140496 | 0.093664 | 0.071625 | 0.820498 | 0.798241 | 0.772164 | 0.75648 | 0.733753 | 0.696782 | 0 | 0.003773 | 0.24147 | 53,104 | 1,410 | 213 | 37.662411 | 0.789255 | 0.017739 | 0 | 0.614815 | 0 | 0.004233 | 0.115012 | 0.006443 | 0 | 0 | 0 | 0 | 0.007407 | 1 | 0.125926 | false | 0.056085 | 0.015873 | 0.005291 | 0.42328 | 0.003175 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
641478775bc3b502acb7437d3057591ddc8ca05a | 40,636 | py | Python | baselines/deepq/assembly/fuzzy_control.py | DengYuelin/baselines-assembly | d40171845349395f0ed389d725873b389b08f94f | [
"MIT"
] | 1 | 2020-08-07T07:05:30.000Z | 2020-08-07T07:05:30.000Z | baselines/deepq/assembly/fuzzy_control.py | DengYuelin/baselines-assembly | d40171845349395f0ed389d725873b389b08f94f | [
"MIT"
] | null | null | null | baselines/deepq/assembly/fuzzy_control.py | DengYuelin/baselines-assembly | d40171845349395f0ed389d725873b389b08f94f | [
"MIT"
] | 3 | 2018-12-20T10:10:57.000Z | 2020-08-07T10:12:57.000Z | # -*- coding: utf-8 -*-
"""
# @Time : 23/10/18 9:10 PM
# @Author : ZHIMIN HOU
# @FileName: fuzzy_control.py
# @Software: PyCharm
# @Github : https://github.com/hzm2016
"""
import matplotlib.pyplot as plt
import tensorflow as tf
import argparse
import numpy as np
import copy as cp
import skfuzzy.control as ctrl
from mpl_toolkits.mplot3d import Axes3D # Required for 3D plotting
class fuzzy_control(object):
# the input is six forces and moments
# the output is the hyperpapermeters [Kpz, kpx, kpy, krx, kry, krz]
def __init__(self,
low_input=np.array([-40, -40, -40, -5, -5, -5]),
high_input=np.array([40, 40, 0, 5, 5, 5]),
low_output=np.array([0., 0., 0., 0., 0., 0.]),
high_output=np.array([0.015, 0.015, 0.02, 0.015, 0.015, 0.015])):
self.low_input = low_input
self.high_input = high_input
self.low_output = low_output
self.high_output = high_output
self.num_input = 5
self.num_output = 3
self.num_mesh = 21
self.sim_kpx = self.build_fuzzy_kpx()
self.sim_kpy = self.build_fuzzy_kpy()
self.sim_kpz = self.build_fuzzy_kpz()
self.sim_krx = self.build_fuzzy_krx()
self.sim_kry = self.build_fuzzy_kry()
self.sim_krz = self.build_fuzzy_krz()
# self.sim_kpx, self.sim_kpy, self.sim_kpz, \
# self.sim_krx, self.sim_kry, self.sim_krz = self.build_fuzzy_system()
def get_output(self, force):
self.sim_kpx.input['fx'] = force[0]
self.sim_kpx.input['my'] = force[4]
self.sim_kpx.compute()
kpx = self.sim_kpx.output['kpx']
index_3 = force[1]
index_4 = force[3]
self.sim_kpy.input['fy'] = index_3
self.sim_kpy.input['mx'] = index_4
self.sim_kpy.compute()
kpy = self.sim_kpy.output['kpy']
index_5 = force[0]
index_6 = force[1]
self.sim_kpz.input['fx'] = index_5
self.sim_kpz.input['fy'] = index_6
self.sim_kpz.compute()
kpz = self.sim_kpz.output['kpz']
index_7 = force[1]
index_8 = force[3]
self.sim_krx.input['fy'] = index_7
self.sim_krx.input['mx'] = index_8
self.sim_krx.compute()
krx = self.sim_krx.output['krx']
index_9 = force[0]
index_10 = force[4]
self.sim_kry.input['fx'] = index_9
self.sim_kry.input['my'] = index_10
self.sim_kry.compute()
kry = self.sim_kry.output['kry']
index_11 = force[5]
index_12 = force[3]
self.sim_krz.input['mz'] = index_11
self.sim_krz.input['mx'] = index_12
self.sim_krz.compute()
krz = self.sim_krz.output['krz']
# index_1 = (force[0] - self.low_input[0])/(self.high_input[0] - self.low_input[0]) * self.num_mesh
# index_2 = (force[4] - self.low_input[4])/(self.high_input[4] - self.low_input[4]) * self.num_mesh
# self.sim_kpx.input['fx'] = index_1
# self.sim_kpx.input['my'] = index_2
# self.sim_kpx.compute()
# kpx = self.sim_kpx.output['kpx']
#
# index_3 = (force[1] - self.low_input[1]) / (self.high_input[1] - self.low_input[1]) * self.num_mesh
# index_4 = (force[3] - self.low_input[3]) / (self.high_input[3] - self.low_input[3]) * self.num_mesh
# self.sim_kry.input['fy'] = index_3
# self.sim_kry.input['mx'] = index_4
# self.sim_kry.compute()
# kpy = self.sim_kry.output['kpy']
#
# index_5 = (force[0] - self.low_input[0]) / (self.high_input[0] - self.low_input[0]) * self.num_mesh
# index_6 = (force[1] - self.low_input[1]) / (self.high_input[1] - self.low_input[1]) * self.num_mesh
# self.sim_kpz.input['fx'] = index_5
# self.sim_krz.input['fy'] = index_6
# self.sim_krz.compute()
# kpz = self.sim_krz.output['kpz']
#
# index_7 = (force[1] - self.low_input[1]) / (self.high_input[1] - self.low_input[1]) * self.num_mesh
# index_8 = (force[3] - self.low_input[3]) / (self.high_input[3] - self.low_input[3]) * self.num_mesh
# self.sim_krx.input['fy'] = index_7
# self.sim_krx.input['mx'] = index_8
# self.sim_krx.compute()
# krx = self.sim_krx.output['krx']
#
# index_9 = (force[0] - self.low_input[0]) / (self.high_input[0] - self.low_input[0]) * self.num_mesh
# index_10 = (force[4] - self.low_input[4]) / (self.high_input[4] - self.low_input[4]) * self.num_mesh
# self.sim_kry.input['fy'] = index_9
# self.sim_kry.input['mx'] = index_10
# self.sim_kry.compute()
# kry = self.sim_kry.output['kry']
#
# index_11 = (force[5] - self.low_input[5]) / (self.high_input[5] - self.low_input[5]) * self.num_mesh
# index_12 = (force[3] - self.low_input[3]) / (self.high_input[3] - self.low_input[3]) * self.num_mesh
# self.sim_krz.input['mz'] = index_11
# self.sim_krz.input['mx'] = index_12
# self.sim_krz.compute()
# krz = self.sim_krz.output['krx']
return [round(kpx, 5), round(kpy, 5), round(kpz, 5,), round(krx, 5), round(kry, 5), round(krz, 5)]
def plot_rules(self):
self.unsampled = []
for i in range(6):
self.unsampled.append(np.linspace(self.low_input[i], self.high_input[i], 21))
plt.figure(figsize=(15, 15), dpi=100)
plt.title('Fuzzy Rules')
plt.tight_layout(pad=3, w_pad=0.5, h_pad=1.0)
plt.subplots_adjust(left=0.065, bottom=0.1, right=0.995, top=0.9, wspace=0.2, hspace=0.2)
"""kpx"""
upsampled_x = self.unsampled[0]
upsampled_y = self.unsampled[4]
x, y = np.meshgrid(upsampled_x, upsampled_y)
z = np.zeros_like(x)
# Loop through the system 21*21 times to collect the control surface
for i in range(21):
for j in range(21):
self.sim_kpx.input['fx'] = x[i, j]
self.sim_kpx.input['my'] = y[i, j]
self.sim_kpx.compute()
z[i, j] = self.sim_kpx.output['kpx']
ax = plt.subplot(231, projection='3d')
# ax = fig.add_subplot(231, projection='3d')
surf = ax.plot_surface(x, y, z, rstride=1, cstride=1, cmap='viridis',
linewidth=0.4, antialiased=True)
ax.view_init(45, 200)
"""kpy"""
upsampled_x = self.unsampled[1]
upsampled_y = self.unsampled[3]
x, y = np.meshgrid(upsampled_x, upsampled_y)
z = np.zeros_like(x)
# Loop through the system 21*21 times to collect the control surface
for i in range(21):
for j in range(21):
self.sim_kpy.input['fy'] = x[i, j]
self.sim_kpy.input['mx'] = y[i, j]
self.sim_kpy.compute()
z[i, j] = self.sim_kpy.output['kpy']
ax = plt.subplot(232, projection='3d')
# ax = fig.add_subplot(232, projection='3d')
surf = ax.plot_surface(x, y, z, rstride=1, cstride=1, cmap='viridis',
linewidth=0.4, antialiased=True)
ax.view_init(20, 200)
"""kpz"""
upsampled_x = self.unsampled[0]
upsampled_y = self.unsampled[1]
x, y = np.meshgrid(upsampled_x, upsampled_y)
z = np.zeros_like(x)
# Loop through the system 21*21 times to collect the control surface
for i in range(21):
for j in range(21):
self.sim_kpz.input['fx'] = x[i, j]
self.sim_kpz.input['fy'] = y[i, j]
self.sim_kpz.compute()
z[i, j] = self.sim_kpz.output['kpz']
ax = plt.subplot(233, projection='3d')
# ax = fig.add_subplot(233, projection='3d')
surf = ax.plot_surface(x, y, z, rstride=1, cstride=1, cmap='viridis',
linewidth=0.4, antialiased=True)
ax.view_init(20, 200)
"""krx"""
upsampled_x = self.unsampled[1]
upsampled_y = self.unsampled[3]
x, y = np.meshgrid(upsampled_x, upsampled_y)
z = np.zeros_like(x)
# Loop through the system 21*21 times to collect the control surface
for i in range(21):
for j in range(21):
self.sim_krx.input['fy'] = x[i, j]
self.sim_krx.input['mx'] = y[i, j]
self.sim_krx.compute()
z[i, j] = self.sim_krx.output['krx']
ax = plt.subplot(234, projection='3d')
# ax = fig.add_subplot(234, projection='3d')
surf = ax.plot_surface(x, y, z, rstride=1, cstride=1, cmap='viridis',
linewidth=0.4, antialiased=True)
ax.view_init(20, 200)
"""kry"""
upsampled_x = self.unsampled[1]
upsampled_y = self.unsampled[3]
x, y = np.meshgrid(upsampled_x, upsampled_y)
z = np.zeros_like(x)
# Loop through the system 21*21 times to collect the control surface
for i in range(21):
for j in range(21):
self.sim_kry.input['fx'] = x[i, j]
self.sim_kry.input['my'] = y[i, j]
self.sim_kry.compute()
z[i, j] = self.sim_kry.output['kry']
ax = plt.subplot(235, projection='3d')
# ax = fig.add_subplot(235, projection='3d')
surf = ax.plot_surface(x, y, z, rstride=1, cstride=1, cmap='viridis',
linewidth=0.4, antialiased=True)
ax.view_init(20, 200)
"""krz"""
upsampled_x = self.unsampled[3]
upsampled_y = self.unsampled[5]
x, y = np.meshgrid(upsampled_x, upsampled_y)
z = np.zeros_like(x)
for i in range(21):
for j in range(21):
self.sim_krz.input['mx'] = x[i, j]
self.sim_krz.input['mz'] = y[i, j]
self.sim_krz.compute()
z[i, j] = self.sim_krz.output['krz']
ax = plt.subplot(236, projection='3d')
# ax = fig.add_subplot(236, projection='3d')
surf = ax.plot_surface(x, y, z, rstride=1, cstride=1, cmap='viridis',
linewidth=0.4, antialiased=True)
ax.view_init(20, 200)
plt.show()
    def build_fuzzy_kpx(self):
        """Build the fuzzy system for the x-translation gain kpx from fx and my.

        Returns a ControlSystemSimulation with inputs 'fx', 'my' and output 'kpx'.
        """
        fx_universe = np.linspace(self.low_input[0], self.high_input[0], self.num_input)
        my_universe = np.linspace(self.low_input[4], self.high_input[4], self.num_input)
        fx = ctrl.Antecedent(fx_universe, 'fx')
        my = ctrl.Antecedent(my_universe, 'my')
        # Five auto-generated membership sets per input:
        # negative big/small, zero, positive small/big.
        input_names = ['nb', 'ns', 'ze', 'ps', 'pb']
        fx.automf(names=input_names)
        my.automf(names=input_names)
        kpx_universe = np.linspace(self.low_output[0], self.high_output[0], self.num_output)
        kpx = ctrl.Consequent(kpx_universe, 'kpx')
        output_names_3 = ['nb', 'ze', 'pb']
        kpx.automf(names=output_names_3)
        # Big gain when the force is large and the moment agrees in sign.
        rule_kpx_0 = ctrl.Rule(antecedent=((fx['nb'] & my['ze']) |
                                           (fx['nb'] & my['ns']) |
                                           (fx['pb'] & my['ze']) |
                                           (fx['pb'] & my['ps'])),
                               consequent=kpx['pb'], label='rule kpx pb')
        rule_kpx_1 = ctrl.Rule(antecedent=((fx['ns'] & my['ze']) |
                                           (fx['ns'] & my['ns']) |
                                           (fx['ns'] & my['nb']) |
                                           (fx['nb'] & my['nb']) |
                                           (fx['pb'] & my['pb']) |
                                           (fx['ps'] & my['ps']) |
                                           (fx['ps'] & my['pb']) |
                                           (fx['ps'] & my['ze'])),
                               consequent=kpx['ze'], label='rule kpx ze')
        # Small gain near zero force or when force and moment disagree in sign.
        rule_kpx_2 = ctrl.Rule(antecedent=((fx['ze'] & my['ze']) |
                                           (fx['ze'] & my['ps']) |
                                           (fx['ze'] & my['ns']) |
                                           (fx['ze'] & my['pb']) |
                                           (fx['ze'] & my['nb']) |
                                           (fx['nb'] & my['ps']) |
                                           (fx['nb'] & my['pb']) |
                                           (fx['pb'] & my['ns']) |
                                           (fx['pb'] & my['nb']) |
                                           (fx['ns'] & my['ps']) |
                                           (fx['ns'] & my['pb']) |
                                           (fx['ps'] & my['nb']) |
                                           (fx['ps'] & my['ns'])),
                               consequent=kpx['nb'], label='rule kpx nb')
        system_kpx = ctrl.ControlSystem(rules=[rule_kpx_2, rule_kpx_1, rule_kpx_0])
        sim_kpx = ctrl.ControlSystemSimulation(system_kpx, flush_after_run=self.num_mesh * self.num_mesh + 1)
        return sim_kpx
    def build_fuzzy_kpy(self):
        """Build the fuzzy system for the y-translation gain kpy from fy and mx.

        Returns a ControlSystemSimulation with inputs 'fy', 'mx' and output 'kpy'.
        """
        fy_universe = np.linspace(self.low_input[1], self.high_input[1], self.num_input)
        mx_universe = np.linspace(self.low_input[3], self.high_input[3], self.num_input)
        fy = ctrl.Antecedent(fy_universe, 'fy')
        mx = ctrl.Antecedent(mx_universe, 'mx')
        # Five auto-generated membership sets per input.
        input_names = ['nb', 'ns', 'ze', 'ps', 'pb']
        fy.automf(names=input_names)
        mx.automf(names=input_names)
        kpy_universe = np.linspace(self.low_output[1], self.high_output[1], self.num_output)
        kpy = ctrl.Consequent(kpy_universe, 'kpy')
        output_names_3 = ['nb', 'ze', 'pb']
        kpy.automf(names=output_names_3)
        # Big gain when the force is large and the moment agrees.
        rule_kpy_0 = ctrl.Rule(antecedent=((fy['nb'] & mx['ns']) |
                                           (fy['nb'] & mx['ze']) |
                                           (fy['pb'] & mx['ze']) |
                                           (fy['pb'] & mx['ps'])),
                               consequent=kpy['pb'], label='rule_kpy_pb')
        rule_kpy_1 = ctrl.Rule(antecedent=((fy['ns'] & mx['ze']) |
                                           (fy['ns'] & mx['ns']) |
                                           (fy['ns'] & mx['nb']) |
                                           (fy['ps'] & mx['ps']) |
                                           (fy['ps'] & mx['pb']) |
                                           (fy['ps'] & mx['ze']) |
                                           (fy['nb'] & mx['nb']) |
                                           (fy['pb'] & mx['pb'])),
                               consequent=kpy['ze'], label='rule_kpy_ze')
        # Small gain near zero force or when force and moment disagree in sign.
        rule_kpy_2 = ctrl.Rule(antecedent=((fy['ze']) |
                                           (fy['nb'] & mx['ps']) |
                                           (fy['nb'] & mx['pb']) |
                                           (fy['pb'] & mx['ns']) |
                                           (fy['pb'] & mx['nb']) |
                                           (fy['ns'] & mx['ps']) |
                                           (fy['ns'] & mx['pb']) |
                                           (fy['ps'] & mx['nb']) |
                                           (fy['ps'] & mx['ns'])),
                               consequent=kpy['nb'], label='rule_kpy_nb')
        system_kpy = ctrl.ControlSystem(rules=[rule_kpy_0, rule_kpy_1, rule_kpy_2])
        sim_kpy = ctrl.ControlSystemSimulation(system_kpy, flush_after_run=self.num_mesh * self.num_mesh + 1)
        return sim_kpy
    def build_fuzzy_kpz(self):
        """Build the fuzzy system for the z-translation gain kpz from fx and fy.

        Returns a ControlSystemSimulation with inputs 'fx', 'fy' and output 'kpz'.
        """
        fy_universe = np.linspace(self.low_input[1], self.high_input[1], self.num_input)
        fx_universe = np.linspace(self.low_input[0], self.high_input[0], self.num_input)
        fy = ctrl.Antecedent(fy_universe, 'fy')
        fx = ctrl.Antecedent(fx_universe, 'fx')
        # Five auto-generated membership sets per input.
        input_names = ['nb', 'ns', 'ze', 'ps', 'pb']
        fy.automf(names=input_names)
        fx.automf(names=input_names)
        kpz_universe = np.linspace(self.low_output[2], self.high_output[2], self.num_output)
        kpz = ctrl.Consequent(kpz_universe, 'kpz')
        output_names_3 = ['nb', 'ze', 'pb']
        kpz.automf(names=output_names_3)
        # Big z gain when both lateral forces are near zero.
        rule_kpz_0 = ctrl.Rule(antecedent=((fx['ze'] & fy['ze']) |
                                           (fx['ze'] & fy['ns']) |
                                           (fx['ns'] & fy['ze']) |
                                           (fx['ze'] & fy['ps']) |
                                           (fx['ps'] & fy['ze'])),
                               consequent=kpz['pb'], label='rule_kpz_pb')
        rule_kpz_1 = ctrl.Rule(antecedent=((fx['ns'] & fy['ns']) |
                                           (fx['ps'] & fy['ps']) |
                                           (fx['ns'] & fy['ps']) |
                                           (fx['ps'] & fy['ns'])),
                               consequent=kpz['ze'], label='rule_kpz_ze')
        # Small z gain when either lateral force is big.
        rule_kpz_2 = ctrl.Rule(antecedent=((fx['nb']) |
                                           (fx['pb']) |
                                           (fy['nb']) |
                                           (fy['pb'])),
                               consequent=kpz['nb'], label='rule_kpz_nb')
        system_kpz = ctrl.ControlSystem(rules=[rule_kpz_0, rule_kpz_1, rule_kpz_2])
        sim_kpz = ctrl.ControlSystemSimulation(system_kpz, flush_after_run=self.num_mesh * self.num_mesh + 1)
        return sim_kpz
def build_fuzzy_krx(self):
fy_universe = np.linspace(self.low_input[1], self.high_input[1], self.num_input)
mx_universe = np.linspace(self.low_input[3], self.high_input[3], self.num_input)
fy = ctrl.Antecedent(fy_universe, 'fy')
mx = ctrl.Antecedent(mx_universe, 'mx')
input_names = ['nb', 'ns', 'ze', 'ps', 'pb']
fy.automf(names=input_names)
mx.automf(names=input_names)
krx_universe = np.linspace(self.low_output[3], self.high_output[3], 3)
krx = ctrl.Consequent(krx_universe, 'krx')
output_names_2 = ['nb', 'ze', 'pb']
krx.automf(names=output_names_2)
rule_krx_0 = ctrl.Rule(antecedent=((mx['nb'] & fy['ze']) |
(mx['nb'] & fy['ns']) |
(mx['pb'] & fy['ze']) |
(mx['pb'] & fy['ps'])),
consequent=krx['pb'], label='rule_krx_pb')
rule_krx_1 = ctrl.Rule(antecedent=((mx['ze']) |
(mx['ns']) |
(mx['ps']) |
(mx['nb'] & fy['nb']) |
(mx['nb'] & fy['ps']) |
(mx['nb'] & fy['pb']) |
(mx['pb'] & fy['pb']) |
(mx['pb'] & fy['ns']) |
(mx['pb'] & fy['nb'])),
consequent=krx['nb'], label='rule_krx_ze')
system_krx = ctrl.ControlSystem(rules=[rule_krx_0, rule_krx_1])
sim_krx = ctrl.ControlSystemSimulation(system_krx, flush_after_run=self.num_mesh * self.num_mesh + 1)
# upsampled_x = self.unsampled[1]
# upsampled_y = self.unsampled[3]
# x, y = np.meshgrid(upsampled_x, upsampled_y)
# z = np.zeros_like(x)
#
# # Loop through the system 21*21 times to collect the control surface
# for i in range(21):
# for j in range(21):
# sim_krx.input['fy'] = x[i, j]
# sim_krx.input['mx'] = y[i, j]
# sim_krx.compute()
# z[i, j] = sim_krx.output['krx']
#
# """ Plot the result in pretty 3D with alpha blending"""
# fig = plt.figure(figsize=(8, 8))
# ax = fig.add_subplot(111, projection='3d')
#
# surf = ax.plot_surface(x, y, z, rstride=1, cstride=1, cmap='viridis',
# linewidth=0.4, antialiased=True)
# plt.show()
return sim_krx
    def build_fuzzy_kry(self):
        """Build the fuzzy system for the y-rotation gain kry from fx and my.

        Returns a ControlSystemSimulation with inputs 'fx', 'my' and output 'kry'.
        """
        fx_universe = np.linspace(self.low_input[0], self.high_input[0], self.num_input)
        my_universe = np.linspace(self.low_input[4], self.high_input[4], self.num_input)
        fx = ctrl.Antecedent(fx_universe, 'fx')
        my = ctrl.Antecedent(my_universe, 'my')
        # Five auto-generated membership sets per input.
        input_names = ['nb', 'ns', 'ze', 'ps', 'pb']
        fx.automf(names=input_names)
        my.automf(names=input_names)
        # NOTE(review): hard-coded 3 here where build_fuzzy_kpx uses
        # self.num_output (same value).
        kry_universe = np.linspace(self.low_output[4], self.high_output[4], 3)
        kry = ctrl.Consequent(kry_universe, 'kry')
        output_names_2 = ['nb', 'ze', 'pb']
        kry.automf(names=output_names_2)
        # Big rotation gain only when the moment is big and the force agrees.
        rule_kry_0 = ctrl.Rule(antecedent=((my['nb'] & fx['ze']) |
                                           (my['nb'] & fx['ns']) |
                                           (my['pb'] & fx['ze']) |
                                           (my['pb'] & fx['ps'])),
                               consequent=kry['pb'], label='rule_kry_pb')
        rule_kry_1 = ctrl.Rule(antecedent=((my['ze']) |
                                           (my['ns']) |
                                           (my['ps']) |
                                           (my['nb'] & fx['nb']) |
                                           (my['pb'] & fx['pb']) |
                                           (my['nb'] & fx['ps']) |
                                           (my['pb'] & fx['ns']) |
                                           (my['nb'] & fx['pb']) |
                                           (my['pb'] & fx['nb'])),
                               consequent=kry['nb'], label='rule_kry_nb')
        system_kry = ctrl.ControlSystem(rules=[rule_kry_0, rule_kry_1])
        sim_kry = ctrl.ControlSystemSimulation(system_kry, flush_after_run=self.num_mesh * self.num_mesh + 1)
        return sim_kry
def build_fuzzy_krz(self):
    """Build the fuzzy controller for the rotational-Z stiffness gain (krz).

    Antecedents are the desired moments mx (input index 3) and mz
    (input index 5); the consequent krz spans a coarse 3-point universe
    (output index 5), since the rules only ever select the extremes.

    :return: a ``ControlSystemSimulation`` evaluating the krz rule base.
    """
    mx_universe = np.linspace(self.low_input[3], self.high_input[3], self.num_input)
    mz_universe = np.linspace(self.low_input[5], self.high_input[5], self.num_input)
    mx = ctrl.Antecedent(mx_universe, 'mx')
    mz = ctrl.Antecedent(mz_universe, 'mz')
    input_names = ['nb', 'ns', 'ze', 'ps', 'pb']
    mx.automf(names=input_names)
    mz.automf(names=input_names)
    krz_universe = np.linspace(self.low_output[5], self.high_output[5], 3)
    krz = ctrl.Consequent(krz_universe, 'krz')
    output_names_2 = ['nb', 'ze', 'pb']
    krz.automf(names=output_names_2)
    # krz -> 'pb' when mz is at either extreme while mx stays mid-range.
    rule_krz_0 = ctrl.Rule(antecedent=((mz['nb'] & mx['ze']) |
                                       (mz['nb'] & mx['ps']) |
                                       (mz['nb'] & mx['ns']) |
                                       (mz['pb'] & mx['ns']) |
                                       (mz['pb'] & mx['ze']) |
                                       (mz['pb'] & mx['ps'])),
                           consequent=krz['pb'], label='rule_krz_pb')
    # Every remaining combination keeps krz at its lower bound.
    rule_krz_1 = ctrl.Rule(antecedent=((mz['ze']) |
                                       (mz['ns']) |
                                       (mz['ps']) |
                                       (mz['nb'] & mx['nb']) |
                                       (mz['nb'] & mx['pb']) |
                                       (mz['pb'] & mx['pb']) |
                                       (mz['pb'] & mx['nb'])),
                           consequent=krz['nb'], label='rule_krz_nb')
    system_krz = ctrl.ControlSystem(rules=[rule_krz_0, rule_krz_1])
    # flush_after_run sized so one full num_mesh x num_mesh sweep completes
    # before the simulation cache is flushed.
    sim_krz = ctrl.ControlSystemSimulation(system_krz, flush_after_run=self.num_mesh * self.num_mesh + 1)
    # NOTE: dead commented-out surface-plotting code removed.
    return sim_krz
def build_fuzzy_system(self):
    """Build all six fuzzy gain controllers in a single pass.

    Creates antecedents for the six desired wrench components
    (fx, fy, fz, mx, my, mz) and one consequent per stiffness gain
    (kpx, kpy, kpz, krx, kry, krz), wires the rule base for each and
    returns one simulation object per gain.

    :return: tuple ``(sim_kpx, sim_kpy, sim_kpz, sim_krx, sim_kry, sim_krz)``
        of ``ControlSystemSimulation`` instances.
    """
    # Sparse universe makes calculations faster, without sacrificing accuracy.
    # Only the critical points are included here; higher resolution is
    # unnecessary.
    low_force = self.low_input
    high_force = self.high_input
    num_input = self.num_input
    fx_universe = np.linspace(low_force[0], high_force[0], num_input)
    fy_universe = np.linspace(low_force[1], high_force[1], num_input)
    fz_universe = np.linspace(low_force[2], high_force[2], num_input)
    # BUGFIX: the moment universes previously spanned
    # (low_force[i], low_force[i]) -- a zero-width range that collapsed every
    # membership function to a single point.  Use the high bound, matching
    # build_fuzzy_kry / build_fuzzy_krz.
    mx_universe = np.linspace(low_force[3], high_force[3], num_input)
    my_universe = np.linspace(low_force[4], high_force[4], num_input)
    mz_universe = np.linspace(low_force[5], high_force[5], num_input)
    # Create the fuzzy input variables.
    fx = ctrl.Antecedent(fx_universe, 'fx')
    fy = ctrl.Antecedent(fy_universe, 'fy')
    fz = ctrl.Antecedent(fz_universe, 'fz')
    mx = ctrl.Antecedent(mx_universe, 'mx')
    my = ctrl.Antecedent(my_universe, 'my')
    mz = ctrl.Antecedent(mz_universe, 'mz')
    input_names = ['nb', 'ns', 'ze', 'ps', 'pb']
    fx.automf(names=input_names)
    fy.automf(names=input_names)
    fz.automf(names=input_names)
    mx.automf(names=input_names)
    my.automf(names=input_names)
    mz.automf(names=input_names)
    # Create the outputs.  Translational gains use num_output points;
    # rotational gains only need the 3-point universe (rules hit extremes).
    kpx_universe = np.linspace(self.low_output[0], self.high_output[0], self.num_output)
    kpy_universe = np.linspace(self.low_output[1], self.high_output[1], self.num_output)
    kpz_universe = np.linspace(self.low_output[2], self.high_output[2], self.num_output)
    krx_universe = np.linspace(self.low_output[3], self.high_output[3], 3)
    kry_universe = np.linspace(self.low_output[4], self.high_output[4], 3)
    krz_universe = np.linspace(self.low_output[5], self.high_output[5], 3)
    kpx = ctrl.Consequent(kpx_universe, 'kpx')
    kpy = ctrl.Consequent(kpy_universe, 'kpy')
    kpz = ctrl.Consequent(kpz_universe, 'kpz')
    krx = ctrl.Consequent(krx_universe, 'krx')
    kry = ctrl.Consequent(kry_universe, 'kry')
    krz = ctrl.Consequent(krz_universe, 'krz')
    output_names_3 = ['nb', 'ze', 'pb']
    # `automf` populates each fuzzy variable with terms named via `names=`.
    kpx.automf(names=output_names_3)
    kpy.automf(names=output_names_3)
    kpz.automf(names=output_names_3)
    krx.automf(names=output_names_3)
    kry.automf(names=output_names_3)
    krz.automf(names=output_names_3)
    # Rules for kpx, driven by the desired force fx and moment my.
    # ===============================================================
    rule_kpx_0 = ctrl.Rule(antecedent=((fx['nb'] & my['ze']) |
                                       (fx['nb'] & my['ns']) |
                                       (fx['pb'] & my['ze']) |
                                       (fx['pb'] & my['ps'])),
                           consequent=kpx['pb'], label='rule kpx pb')
    rule_kpx_1 = ctrl.Rule(antecedent=((fx['ns'] & my['ze']) |
                                       (fx['ns'] & my['ns']) |
                                       (fx['ns'] & my['nb']) |
                                       (fx['nb'] & my['nb']) |
                                       (fx['pb'] & my['pb']) |
                                       (fx['ps'] & my['ps']) |
                                       (fx['ps'] & my['pb']) |
                                       (fx['ps'] & my['ze'])),
                           consequent=kpx['ze'], label='rule kpx ze')
    rule_kpx_2 = ctrl.Rule(antecedent=((fx['ze'] & my['ze']) |
                                       (fx['ze'] & my['ps']) |
                                       (fx['ze'] & my['ns']) |
                                       (fx['ze'] & my['pb']) |
                                       (fx['ze'] & my['nb']) |
                                       (fx['nb'] & my['ps']) |
                                       (fx['nb'] & my['pb']) |
                                       (fx['pb'] & my['ns']) |
                                       (fx['pb'] & my['nb']) |
                                       (fx['ns'] & my['ps']) |
                                       (fx['ns'] & my['pb']) |
                                       (fx['ps'] & my['nb']) |
                                       (fx['ps'] & my['ns'])),
                           consequent=kpx['nb'], label='rule kpx nb')
    system_kpx = ctrl.ControlSystem(rules=[rule_kpx_2, rule_kpx_1, rule_kpx_0])
    sim_kpx = ctrl.ControlSystemSimulation(system_kpx, flush_after_run=self.num_mesh * self.num_mesh + 1)
    # Rules for kpy, driven by the desired force fy and moment mx.
    # ===============================================================
    rule_kpy_0 = ctrl.Rule(antecedent=((fy['nb'] & mx['ns']) |
                                       (fy['nb'] & mx['ze']) |
                                       (fy['pb'] & mx['ze']) |
                                       (fy['pb'] & mx['ps'])),
                           consequent=kpy['pb'], label='rule_kpy_pb')
    rule_kpy_1 = ctrl.Rule(antecedent=((fy['ns'] & mx['ze']) |
                                       (fy['ns'] & mx['ns']) |
                                       (fy['ns'] & mx['nb']) |
                                       (fy['ps'] & mx['ps']) |
                                       (fy['ps'] & mx['pb']) |
                                       (fy['ps'] & mx['ze']) |
                                       (fy['nb'] & mx['nb']) |
                                       (fy['pb'] & mx['pb'])),
                           consequent=kpy['ze'], label='rule_kpy_ze')
    rule_kpy_2 = ctrl.Rule(antecedent=((fy['ze']) |
                                       (fy['nb'] & mx['ps']) |
                                       (fy['nb'] & mx['pb']) |
                                       (fy['pb'] & mx['ns']) |
                                       (fy['pb'] & mx['nb']) |
                                       (fy['ns'] & mx['ps']) |
                                       (fy['ns'] & mx['pb']) |
                                       (fy['ps'] & mx['nb']) |
                                       (fy['ps'] & mx['ns'])),
                           consequent=kpy['nb'], label='rule_kpy_nb')
    system_kpy = ctrl.ControlSystem(rules=[rule_kpy_0, rule_kpy_1, rule_kpy_2])
    sim_kpy = ctrl.ControlSystemSimulation(system_kpy, flush_after_run=self.num_mesh * self.num_mesh + 1)
    # Rules for kpz, driven by the in-plane forces fx and fy.
    # ===============================================================
    rule_kpz_0 = ctrl.Rule(antecedent=((fx['ze'] & fy['ze']) |
                                       (fx['ze'] & fy['ns']) |
                                       (fx['ns'] & fy['ze']) |
                                       (fx['ze'] & fy['ps']) |
                                       (fx['ps'] & fy['ze'])),
                           consequent=kpz['pb'], label='rule_kpz_pb')
    rule_kpz_1 = ctrl.Rule(antecedent=((fx['ns'] & fy['ns']) |
                                       (fx['ps'] & fy['ps']) |
                                       (fx['ns'] & fy['ps']) |
                                       (fx['ps'] & fy['ns'])),
                           consequent=kpz['ze'], label='rule_kpz_ze')
    rule_kpz_2 = ctrl.Rule(antecedent=((fx['nb']) |
                                       (fx['pb']) |
                                       (fy['nb']) |
                                       (fy['pb'])),
                           consequent=kpz['nb'], label='rule_kpz_nb')
    system_kpz = ctrl.ControlSystem(rules=[rule_kpz_0, rule_kpz_1, rule_kpz_2])
    sim_kpz = ctrl.ControlSystemSimulation(system_kpz, flush_after_run=self.num_mesh * self.num_mesh + 1)
    # Rules for krx, driven by the moment mx and force fy.
    # ===============================================================
    rule_krx_0 = ctrl.Rule(antecedent=((mx['nb'] & fy['ze']) |
                                       (mx['nb'] & fy['ns']) |
                                       (mx['pb'] & fy['ze']) |
                                       (mx['pb'] & fy['ps'])),
                           consequent=krx['pb'], label='rule_krx_pb')
    # BUGFIX: this rule was mislabeled 'rule_krx_ze' though its consequent
    # is krx['nb']; renamed for consistency with every other *_nb rule.
    rule_krx_1 = ctrl.Rule(antecedent=((mx['ze']) |
                                       (mx['ns']) |
                                       (mx['ps']) |
                                       (mx['nb'] & fy['nb']) |
                                       (mx['nb'] & fy['ps']) |
                                       (mx['nb'] & fy['pb']) |
                                       (mx['pb'] & fy['pb']) |
                                       (mx['pb'] & fy['ns']) |
                                       (mx['pb'] & fy['nb'])),
                           consequent=krx['nb'], label='rule_krx_nb')
    system_krx = ctrl.ControlSystem(rules=[rule_krx_0, rule_krx_1])
    sim_krx = ctrl.ControlSystemSimulation(system_krx, flush_after_run=self.num_mesh * self.num_mesh + 1)
    # Rules for kry, driven by the moment my and force fx (same rule base
    # as build_fuzzy_kry).
    # ===============================================================
    rule_kry_0 = ctrl.Rule(antecedent=((my['nb'] & fx['ze']) |
                                       (my['nb'] & fx['ns']) |
                                       (my['pb'] & fx['ze']) |
                                       (my['pb'] & fx['ps'])),
                           consequent=kry['pb'], label='rule_kry_pb')
    rule_kry_1 = ctrl.Rule(antecedent=((my['ze']) |
                                       (my['ns']) |
                                       (my['ps']) |
                                       (my['nb'] & fx['nb']) |
                                       (my['pb'] & fx['pb']) |
                                       (my['nb'] & fx['ps']) |
                                       (my['pb'] & fx['ns']) |
                                       (my['nb'] & fx['pb']) |
                                       (my['pb'] & fx['nb'])),
                           consequent=kry['nb'], label='rule_kry_nb')
    system_kry = ctrl.ControlSystem(rules=[rule_kry_0, rule_kry_1])
    sim_kry = ctrl.ControlSystemSimulation(system_kry, flush_after_run=self.num_mesh * self.num_mesh + 1)
    # Rules for krz, driven by the moments mz and mx (same rule base as
    # build_fuzzy_krz).
    # ===============================================================
    # BUGFIX: the third term duplicated (mz['nb'] & mx['ps']); the sibling
    # build_fuzzy_krz uses (mz['nb'] & mx['ns']) here.
    rule_krz_0 = ctrl.Rule(antecedent=((mz['nb'] & mx['ze']) |
                                       (mz['nb'] & mx['ps']) |
                                       (mz['nb'] & mx['ns']) |
                                       (mz['pb'] & mx['ns']) |
                                       (mz['pb'] & mx['ze']) |
                                       (mz['pb'] & mx['ps'])),
                           consequent=krz['pb'], label='rule_krz_pb')
    rule_krz_1 = ctrl.Rule(antecedent=((mz['ze']) |
                                       (mz['ns']) |
                                       (mz['ps']) |
                                       (mz['nb'] & mx['nb']) |
                                       (mz['pb'] & mx['pb']) |
                                       (mz['nb'] & mx['pb']) |
                                       (mz['pb'] & mx['nb'])),
                           consequent=krz['nb'], label='rule_krz_nb')
    system_krz = ctrl.ControlSystem(rules=[rule_krz_0, rule_krz_1])
    sim_krz = ctrl.ControlSystemSimulation(system_krz, flush_after_run=self.num_mesh * self.num_mesh + 1)
    return sim_kpx, sim_kpy, sim_kpz, sim_krx, sim_kry, sim_krz
if __name__ == "__main__":
    # Demo entry point: build the controller with zero lower gain bounds and
    # hand-tuned upper gain bounds, then call plot_rules() on it.
    fuzzy_system = fuzzy_control(low_output=np.array([0., 0., 0., 0., 0., 0.]), high_output=np.array([0.02, 0.02, 0.025, 0.015, 0.015, 0.015]))
    fuzzy_system.plot_rules()
    # kp = fuzzy_system.get_output(np.array([-20, -20, -30, 0, 0.9, 0.7]))[:3]
    # fuzzy_system.build_fuzzy_kpx()
64291e11159a482e31482791fe5794b59633711e | 3,627 | py | Python | database_managment/Get_Student.py | self-involved/CRM_project | d485e9d60adfe7c60818c2998cda6e71eb9d39d6 | [
"MIT"
] | null | null | null | database_managment/Get_Student.py | self-involved/CRM_project | d485e9d60adfe7c60818c2998cda6e71eb9d39d6 | [
"MIT"
] | null | null | null | database_managment/Get_Student.py | self-involved/CRM_project | d485e9d60adfe7c60818c2998cda6e71eb9d39d6 | [
"MIT"
] | null | null | null | from database_managment import db_connection
def Get_student():
    """Fetch every student as ``(name, row_id, 'pending')`` tuples.

    The literal ``'pending'`` column supplies a default status alongside
    each record.

    :return: rows returned by ``cursor.fetchall()`` over ``Student_info``.
    """
    connection = db_connection.connect()
    try:
        cur = connection.cursor()
        try:
            cur.execute("select name,row_id,'pending' from Student_info;")
            result = cur.fetchall()
        finally:
            # Always release the cursor, even if the query fails.
            cur.close()
        connection.commit()
    finally:
        connection.close()
    return result
def Check_student():
    """Return a flat list of all student names in ``Student_info``.

    :return: list of name strings (each fetched row is a 1-tuple).
    """
    connection = db_connection.connect()
    try:
        cur = connection.cursor()
        try:
            cur.execute("select name from Student_info;")
            rows = cur.fetchall()
        finally:
            # Always release the cursor, even if the query fails.
            cur.close()
        connection.commit()
    finally:
        connection.close()
    # Flatten the 1-tuples into bare names.
    return [row[0] for row in rows]
def Create_student(student_name):
    """Insert a new row into ``Student_info`` with the given name.

    :param student_name: name of the student to create.
    """
    connection = db_connection.connect()
    try:
        cur = connection.cursor()
        try:
            # BUGFIX: parameters must be a real tuple -- the original passed
            # `( student_name )`, which is just the bare string.
            cur.execute("insert into Student_info (name) values( %s );", (student_name,))
        finally:
            cur.close()
        # Commit so the insert is durable.
        connection.commit()
    finally:
        connection.close()
def Delete_student(student_name):
    """Delete the row(s) in ``Student_info`` matching the given name.

    :param student_name: name of the student to delete.
    """
    connection = db_connection.connect()
    try:
        cur = connection.cursor()
        try:
            # The name is bound as a query parameter; the original also called
            # a no-op `.format()` on the %s template, which is removed here.
            cur.execute("delete from Student_info where name= %s ;", (student_name,))
        finally:
            cur.close()
        # Commit so the delete is durable.
        connection.commit()
    finally:
        connection.close()
def Check_date(section, name):
    """Return the most recent test date recorded for a student in a section.

    :param section: one of 'writing', 'speaking', 'reading', 'listening'.
    :param name: the student's name.
    :return: rows from ``fetchall()`` holding ``max(test_date)``.
    :raises ValueError: if ``section`` is not a known section.
    """
    # Dispatch table replaces the if/elif chain; %s placeholders replace the
    # str.format interpolation that was vulnerable to SQL injection.
    queries = {
        'writing': "select max(test_date) from writing_band where student_name=%s;",
        'speaking': "select max(test_date) from speaking_band where student_name=%s;",
        'listening': "select max(test_date) from correction_rl where type='L' and student_name=%s;",
        'reading': "select max(test_date) from correction_rl where type='R' and student_name=%s;",
    }
    try:
        sql = queries[section]
    except KeyError:
        raise ValueError("unknown section: {!r}".format(section))
    connection = db_connection.connect()
    try:
        cur = connection.cursor()
        try:
            cur.execute(sql, (name,))
            result = cur.fetchall()
        finally:
            # The original leaked the cursor; always close it.
            cur.close()
    finally:
        connection.close()
    return result
def Visualization(stu, sec, start, end):
    """Fetch (test_date, score) pairs for a student over a date range.

    :param stu: student name.
    :param sec: section -- 'writing', 'speaking', 'reading' or 'listening'.
    :param start: inclusive start of the date range.
    :param end: inclusive end of the date range.
    :return: rows from ``fetchall()``, each ``(test_date, score)``.
    :raises ValueError: if ``sec`` is not a known section.
    """
    # Parameter binding (%s) replaces the str.format interpolation that was
    # vulnerable to SQL injection.
    queries = {
        'writing': "select test_date,total_cal from writing_band "
                   "where student_name=%s and (test_date between %s and %s);",
        'speaking': "select test_date,total_input from speaking_band "
                    "where student_name=%s and (test_date between %s and %s);",
        'listening': "select test_date,band from correction_rl "
                     "where type='L' and student_name=%s and (test_date between %s and %s);",
        'reading': "select test_date,band from correction_rl "
                   "where type='R' and student_name=%s and (test_date between %s and %s);",
    }
    try:
        sql = queries[sec]
    except KeyError:
        raise ValueError("unknown section: {!r}".format(sec))
    connection = db_connection.connect()
    try:
        cur = connection.cursor()
        try:
            cur.execute(sql, (stu, start, end))
            result = cur.fetchall()
        finally:
            # The original leaked the cursor; always close it.
            cur.close()
    finally:
        connection.close()
    return result
ff296c1f657b8cacda16ccee3d30d2e021ef5a05 | 14,104 | py | Python | tests/unit_test/api/_retry_sample.py | Chisanan232/multirunnable | 7223e49750dc3d3ccf7ebcd3d292138916b582f2 | [
"Apache-2.0"
] | 1 | 2022-03-18T15:20:53.000Z | 2022-03-18T15:20:53.000Z | tests/unit_test/api/_retry_sample.py | Chisanan232/multirunnable | 7223e49750dc3d3ccf7ebcd3d292138916b582f2 | [
"Apache-2.0"
] | null | null | null | tests/unit_test/api/_retry_sample.py | Chisanan232/multirunnable | 7223e49750dc3d3ccf7ebcd3d292138916b582f2 | [
"Apache-2.0"
] | null | null | null | from multirunnable.api.decorator import (
retry, async_retry
)
from collections import namedtuple
# Value passed as ``timeout=`` to the retry decorators exercised below.
_Retry_Time = 4
# NOTE(review): unused in this file; presumably mirrors the decorator's
# default timeout -- confirm against multirunnable.api.decorator.
_Default_Retry_Time = 1

# Counters bumped by the sample targets and their phase handlers so tests
# can verify how many times each phase actually ran.
Running_Target_Function_Counter: int = 0
Initial_Handling_Flag_Counter: int = 0
Done_Handling_Flag_Counter: int = 0
Final_Handling_Flag_Counter: int = 0
Error_Handling_Flag_Counter: int = 0

# Flags recording which flavor of target ran: plain function vs. method vs.
# classmethod vs. staticmethod, with default vs. explicit retry settings.
Default_Function_Flag = False
Retry_Function_Flag = False
Default_Method_Flag = False
Retry_Method_Flag = False
Default_Classmethod_Flag = False
Retry_Classmethod_Flag = False
Default_Staticmethod_Flag = False
Retry_Staticmethod_Flag = False
# Set True whenever an async sample target actually executed.
_Async_Running = False

# Canned return value / exception shared by every sample target.
_Test_Return_Value = "TestResult"
_Test_Exception = Exception("Test for raising exception.")
def init_flag() -> None:
    """Reset every module-level counter and flag to its starting value."""
    global Running_Target_Function_Counter, Initial_Handling_Flag_Counter, Done_Handling_Flag_Counter, Final_Handling_Flag_Counter, Error_Handling_Flag_Counter
    global Default_Function_Flag, Retry_Function_Flag, Default_Method_Flag, Retry_Method_Flag, Default_Classmethod_Flag, Retry_Classmethod_Flag, Default_Staticmethod_Flag, Retry_Staticmethod_Flag, _Async_Running

    # Phase counters back to zero.
    Running_Target_Function_Counter = 0
    Initial_Handling_Flag_Counter = Done_Handling_Flag_Counter = 0
    Final_Handling_Flag_Counter = Error_Handling_Flag_Counter = 0

    # Which-target-ran flags back to False.
    Default_Function_Flag = Retry_Function_Flag = False
    Default_Method_Flag = Retry_Method_Flag = False
    Default_Classmethod_Flag = Retry_Classmethod_Flag = False
    Default_Staticmethod_Flag = Retry_Staticmethod_Flag = False
    _Async_Running = False
# Immutable snapshot type for the five phase counters (built by
# get_process_flag below).
Process_Flag = namedtuple(
    "Process_Flag",
    ["Running_Target_Function_Counter",
     "Initial_Handling_Flag_Counter",
     "Done_Handling_Flag_Counter",
     "Final_Handling_Flag_Counter",
     "Error_Handling_Flag_Counter"]
)

# Immutable snapshot type for the which-target-ran flags (built by
# get_running_function_flag below).
Run_Function_Flag = namedtuple(
    "Run_Function_Flag",
    ["Default_Function_Flag",
     "Retry_Function_Flag",
     "Default_Method_Flag",
     "Retry_Method_Flag",
     "Default_Classmethod_Flag",
     "Retry_Classmethod_Flag",
     "Default_Staticmethod_Flag",
     "Retry_Staticmethod_Flag",
     "Async_Running"]
)
def get_process_flag() -> Process_Flag:
    """Return an immutable snapshot of the five phase counters."""
    # `global` is unnecessary for reads, so the snapshot is built directly.
    return Process_Flag(
        Running_Target_Function_Counter=Running_Target_Function_Counter,
        Initial_Handling_Flag_Counter=Initial_Handling_Flag_Counter,
        Done_Handling_Flag_Counter=Done_Handling_Flag_Counter,
        Final_Handling_Flag_Counter=Final_Handling_Flag_Counter,
        Error_Handling_Flag_Counter=Error_Handling_Flag_Counter,
    )
def get_running_function_flag() -> Run_Function_Flag:
    """Return an immutable snapshot of the which-target-ran flags."""
    # The original declared a (wrong) `global` list of counter names; globals
    # are readable without it, so the snapshot is returned directly.
    return Run_Function_Flag(
        Default_Function_Flag=Default_Function_Flag,
        Retry_Function_Flag=Retry_Function_Flag,
        Default_Method_Flag=Default_Method_Flag,
        Retry_Method_Flag=Retry_Method_Flag,
        Default_Classmethod_Flag=Default_Classmethod_Flag,
        Retry_Classmethod_Flag=Retry_Classmethod_Flag,
        Default_Staticmethod_Flag=Default_Staticmethod_Flag,
        Retry_Staticmethod_Flag=Retry_Staticmethod_Flag,
        Async_Running=_Async_Running,
    )
def instantiate_retry_decorator() -> None:
    # NOTE(review): presumably exists so tests can check that bare
    # instantiation of the decorator raises nothing -- confirm in test suite.
    retry()


def instantiate_async_retry_decorator() -> None:
    # Async counterpart of the bare-instantiation check above.
    async_retry()
@retry.function
def target_function_with_default():
    """
    Function using retry mechanism with default processes.

    Bumps the run counter, raises the function flag and returns the canned
    test value.
    :return: ``_Test_Return_Value``.
    """
    global Running_Target_Function_Counter, Default_Function_Flag
    Running_Target_Function_Counter += 1
    Default_Function_Flag = True
    return _Test_Return_Value


@retry.function
def target_function_raising_exception_with_default():
    """
    Same as target_function_with_default but this would raise an exception.
    :return:
    """
    global Running_Target_Function_Counter, Default_Function_Flag
    Running_Target_Function_Counter += 1
    Default_Function_Flag = True
    raise _Test_Exception


@retry.function(timeout=_Retry_Time)
def target_function():
    """
    Function using retry mechanism and implement all processes.

    All four phase handlers below are registered on this target.
    :return: ``_Test_Return_Value``.
    """
    global Running_Target_Function_Counter, Retry_Function_Flag
    Running_Target_Function_Counter += 1
    Retry_Function_Flag = True
    return _Test_Return_Value


# Registered as target_function's initialization-phase handler.
@target_function.initialization
def _initial_func(*args, **kwargs):
    global Initial_Handling_Flag_Counter
    Initial_Handling_Flag_Counter += 1


# Registered as target_function's done-phase handler; passes the result on.
@target_function.done_handling
def _done_func(result):
    global Done_Handling_Flag_Counter
    Done_Handling_Flag_Counter += 1
    return result


# Registered as target_function's final-phase handler.
@target_function.final_handling
def _final_func():
    global Final_Handling_Flag_Counter
    Final_Handling_Flag_Counter += 1


# Registered as target_function's error-phase handler; returns the exception.
@target_function.error_handling
def _error_func(e: Exception):
    global Error_Handling_Flag_Counter
    Error_Handling_Flag_Counter += 1
    return e


@retry.function(timeout=_Retry_Time)
def target_function_raising_exception():
    """
    Same as target_function but this would raise an exception.
    :return:
    """
    global Running_Target_Function_Counter, Retry_Function_Flag
    Running_Target_Function_Counter += 1
    Retry_Function_Flag = True
    raise _Test_Exception


# NOTE(review): the handlers below rebind the module names _initial_func etc.
# from above; assumed harmless because each handler is captured by its
# decorator at definition time -- confirm against multirunnable's decorator.
@target_function_raising_exception.initialization
def _initial_func(*args, **kwargs):
    global Initial_Handling_Flag_Counter
    Initial_Handling_Flag_Counter += 1


@target_function_raising_exception.done_handling
def _done_func(result):
    global Done_Handling_Flag_Counter
    Done_Handling_Flag_Counter += 1
    return result


@target_function_raising_exception.final_handling
def _final_func():
    global Final_Handling_Flag_Counter
    Final_Handling_Flag_Counter += 1


@target_function_raising_exception.error_handling
def _error_func(e: Exception):
    global Error_Handling_Flag_Counter
    Error_Handling_Flag_Counter += 1
    return e
# Async counterparts of the sample targets above; each also records that an
# async target actually executed via _Async_Running.
@async_retry.function
async def async_target_function_with_default():
    """Async target using the decorator's default settings."""
    global Running_Target_Function_Counter, Default_Function_Flag, _Async_Running
    Running_Target_Function_Counter += 1
    Default_Function_Flag = True
    _Async_Running = True
    return _Test_Return_Value


@async_retry.function
async def async_target_function_raising_exception_with_default():
    """Same as above but always raises the canned test exception."""
    global Running_Target_Function_Counter, Default_Function_Flag, _Async_Running
    Running_Target_Function_Counter += 1
    Default_Function_Flag = True
    _Async_Running = True
    raise _Test_Exception


@async_retry.function(timeout=_Retry_Time)
async def async_target_function():
    """Async target with an explicit timeout; all phase handlers attached."""
    global Running_Target_Function_Counter, Retry_Function_Flag, _Async_Running
    Running_Target_Function_Counter += 1
    Retry_Function_Flag = True
    _Async_Running = True
    return _Test_Return_Value


# Registered as async_target_function's initialization-phase handler.
@async_target_function.initialization
async def _initial_func(*args, **kwargs):
    global Initial_Handling_Flag_Counter
    Initial_Handling_Flag_Counter += 1


# Done-phase handler; passes the result on.
@async_target_function.done_handling
async def _done_func(result):
    global Done_Handling_Flag_Counter
    Done_Handling_Flag_Counter += 1
    return result


# Final-phase handler.
@async_target_function.final_handling
async def _final_func():
    global Final_Handling_Flag_Counter
    Final_Handling_Flag_Counter += 1


# Error-phase handler; returns the exception.
@async_target_function.error_handling
async def _error_func(e: Exception):
    global Error_Handling_Flag_Counter
    Error_Handling_Flag_Counter += 1
    return e


@async_retry.function(timeout=_Retry_Time)
async def async_target_function_raising_exception():
    """Same as async_target_function but always raises."""
    global Running_Target_Function_Counter, Retry_Function_Flag, _Async_Running
    Running_Target_Function_Counter += 1
    Retry_Function_Flag = True
    _Async_Running = True
    raise _Test_Exception


# Phase handlers for the raising async target (distinct names, no shadowing).
@async_target_function_raising_exception.initialization
async def _async_initial_func(*args, **kwargs):
    global Initial_Handling_Flag_Counter
    Initial_Handling_Flag_Counter += 1


@async_target_function_raising_exception.done_handling
async def _async_done_func(result):
    global Done_Handling_Flag_Counter
    Done_Handling_Flag_Counter += 1
    return result


@async_target_function_raising_exception.final_handling
async def _async_final_func():
    global Final_Handling_Flag_Counter
    Final_Handling_Flag_Counter += 1


@async_target_function_raising_exception.error_handling
async def _async_error_func(e: Exception):
    global Error_Handling_Flag_Counter
    Error_Handling_Flag_Counter += 1
    return e
class TargetBoundedFunction:
    """Sample class whose methods are wrapped by ``retry.bounded_function``."""

    @retry.bounded_function
    def target_method_with_default(self):
        """Bounded target using the decorator's default settings."""
        global Running_Target_Function_Counter, Default_Method_Flag
        Running_Target_Function_Counter += 1
        Default_Method_Flag = True
        return _Test_Return_Value

    @retry.bounded_function
    def target_method_raising_exception_with_default(self):
        """Same as above but always raises the canned test exception."""
        global Running_Target_Function_Counter, Default_Method_Flag
        Running_Target_Function_Counter += 1
        Default_Method_Flag = True
        raise _Test_Exception

    @retry.bounded_function(timeout=_Retry_Time)
    def target_method(self):
        """Bounded target with an explicit timeout; handlers attached below."""
        global Running_Target_Function_Counter, Retry_Method_Flag
        Running_Target_Function_Counter += 1
        Retry_Method_Flag = True
        return _Test_Return_Value

    # Phase handlers registered on target_method via its decorator hooks.
    @target_method.initialization
    def initial_function(self, *args, **kwargs):
        global Initial_Handling_Flag_Counter
        Initial_Handling_Flag_Counter += 1

    @target_method.done_handling
    def done_function(self, result):
        global Done_Handling_Flag_Counter
        Done_Handling_Flag_Counter += 1
        return result

    @target_method.final_handling
    def final_function(self):
        global Final_Handling_Flag_Counter
        Final_Handling_Flag_Counter += 1

    @target_method.error_handling
    def error_function(self, e: Exception):
        global Error_Handling_Flag_Counter
        Error_Handling_Flag_Counter += 1
        return e

    @retry.bounded_function(timeout=_Retry_Time)
    def target_method_raising_exception(self):
        """Bounded target with an explicit timeout that always raises."""
        global Running_Target_Function_Counter, Retry_Method_Flag
        Running_Target_Function_Counter += 1
        Retry_Method_Flag = True
        raise _Test_Exception

    # Phase handlers for the raising bounded target.
    @target_method_raising_exception.initialization
    def raising_exception_initial_function(self, *args, **kwargs):
        global Initial_Handling_Flag_Counter
        Initial_Handling_Flag_Counter += 1

    @target_method_raising_exception.done_handling
    def raising_exception_done_function(self, result):
        global Done_Handling_Flag_Counter
        Done_Handling_Flag_Counter += 1
        return result

    @target_method_raising_exception.error_handling
    def raising_exception_error_function(self, e: Exception):
        global Error_Handling_Flag_Counter
        Error_Handling_Flag_Counter += 1
        return e

    @target_method_raising_exception.final_handling
    def raising_exception_final_function(self):
        global Final_Handling_Flag_Counter
        Final_Handling_Flag_Counter += 1
class TargetBoundedAsyncFunction:
    """Sample class whose async methods use ``async_retry.bounded_function``."""

    @async_retry.bounded_function
    async def target_method_with_default(self):
        """Bounded async target using the decorator's default settings."""
        global Running_Target_Function_Counter, _Async_Running
        Running_Target_Function_Counter += 1
        _Async_Running = True
        return _Test_Return_Value

    @async_retry.bounded_function
    async def target_method_raising_exception_with_default(self):
        """Same as above but always raises the canned test exception."""
        global Running_Target_Function_Counter, _Async_Running
        Running_Target_Function_Counter += 1
        _Async_Running = True
        raise _Test_Exception

    @async_retry.bounded_function(timeout=_Retry_Time)
    async def target_method(self):
        """Bounded async target with an explicit timeout; handlers below."""
        global Running_Target_Function_Counter, _Async_Running
        Running_Target_Function_Counter += 1
        _Async_Running = True
        return _Test_Return_Value

    # Phase handlers registered on target_method via its decorator hooks.
    @target_method.initialization
    async def async_initial_function(self, *args, **kwargs):
        global Initial_Handling_Flag_Counter
        Initial_Handling_Flag_Counter += 1

    @target_method.done_handling
    async def async_done_function(self, result):
        global Done_Handling_Flag_Counter
        Done_Handling_Flag_Counter += 1
        return result

    @target_method.final_handling
    async def async_final_function(self):
        global Final_Handling_Flag_Counter
        Final_Handling_Flag_Counter += 1

    @target_method.error_handling
    async def async_error_function(self, e: Exception):
        global Error_Handling_Flag_Counter
        Error_Handling_Flag_Counter += 1
        return e

    @async_retry.bounded_function(timeout=_Retry_Time)
    async def target_method_raising_exception(self):
        """Bounded async target with an explicit timeout that always raises."""
        global Running_Target_Function_Counter, _Async_Running
        Running_Target_Function_Counter += 1
        _Async_Running = True
        raise _Test_Exception

    # Phase handlers for the raising bounded async target.
    @target_method_raising_exception.initialization
    async def async_raising_exception_initial_function(self, *args, **kwargs):
        global Initial_Handling_Flag_Counter
        Initial_Handling_Flag_Counter += 1

    @target_method_raising_exception.done_handling
    async def async_raising_exception_done_function(self, result):
        global Done_Handling_Flag_Counter
        Done_Handling_Flag_Counter += 1
        return result

    @target_method_raising_exception.final_handling
    async def async_raising_exception_final_function(self):
        global Final_Handling_Flag_Counter
        Final_Handling_Flag_Counter += 1

    @target_method_raising_exception.error_handling
    async def async_raising_exception_error_function(self, e: Exception):
        global Error_Handling_Flag_Counter
        Error_Handling_Flag_Counter += 1
        return e
| 28.666667 | 211 | 0.776446 | 1,717 | 14,104 | 5.831101 | 0.043681 | 0.115062 | 0.182181 | 0.111866 | 0.918098 | 0.885038 | 0.846884 | 0.802837 | 0.734419 | 0.705753 | 0 | 0.005156 | 0.174986 | 14,104 | 491 | 212 | 28.725051 | 0.855277 | 0.023256 | 0 | 0.627628 | 0 | 0 | 0.02844 | 0.018643 | 0 | 0 | 0 | 0 | 0 | 1 | 0.087087 | false | 0 | 0.006006 | 0 | 0.177177 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ffa6f8eb9bc15d74a83ffe4313370e9d1ba9675b | 30,764 | py | Python | netapp/santricity/api/v2/service_catalog_api.py | NetApp/santricity-webapi-pythonsdk | 1d3df4a00561192f4cdcdd1890f4d27547ed2de2 | [
"BSD-3-Clause-Clear"
] | 5 | 2016-08-23T17:52:22.000Z | 2019-05-16T08:45:30.000Z | netapp/santricity/api/v2/service_catalog_api.py | NetApp/santricity-webapi-pythonsdk | 1d3df4a00561192f4cdcdd1890f4d27547ed2de2 | [
"BSD-3-Clause-Clear"
] | 2 | 2016-11-10T05:30:21.000Z | 2019-04-05T15:03:37.000Z | netapp/santricity/api/v2/service_catalog_api.py | NetApp/santricity-webapi-pythonsdk | 1d3df4a00561192f4cdcdd1890f4d27547ed2de2 | [
"BSD-3-Clause-Clear"
] | 7 | 2016-08-25T16:11:44.000Z | 2021-02-22T05:31:25.000Z | #!/usr/bin/env python
# coding: utf-8
"""
ServiceCatalogApi.py
The Clear BSD License
Copyright (c) – 2016, NetApp, Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted (subject to the limitations in the disclaimer below) provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of NetApp, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from __future__ import absolute_import
import sys
import os
# python 2 and python 3 compatibility library
from six import iteritems
from ....santricity.configuration import Configuration
from ....santricity.api_client import ApiClient
class ServiceCatalogApi(object):
def __init__(self, api_client=None):
    """Bind an ApiClient, reusing or seeding the shared Configuration.

    :param api_client: explicit client to use; when omitted, the client
        cached on ``Configuration`` is used, created on first need with
        the '/devmgr/v2' context path.
    """
    config = Configuration()
    if api_client:
        self.api_client = api_client
        return
    # Fall back to the configuration-level client, lazily constructing it.
    if not config.api_client:
        config.api_client = ApiClient(context_path='/devmgr/v2')
    self.api_client = config.api_client
def get_all_pools(self, system_id, **kwargs):
"""
Retrieve the list of pools
Mode: Both Embedded and Proxy. Retrieve the list of pools and their service quality features. The list is sorted in descending order by the largest block of freeSpace that each has available.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_all_pools(system_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
:return: list[PoolQosResponse]
If the method is called asynchronously,
returns the request thread.
:raises: ValueError
If the required params are not provided or if the response data format is unknown.
TypeError:
When the data type of response data is different from what we are expecting
ApiException:
Occurs when we get a HTTP error code (422 and above).
"""
all_params = ['system_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_all_pools" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'system_id' is set
if ('system_id' not in params) or (params['system_id'] is None):
raise ValueError("Missing the required parameter `system_id` when calling `get_all_pools`")
resource_path = '/storage-systems/{system-id}/ssc/pools'.replace('{format}', 'json')
path_params = {}
if 'system_id' in params:
path_params['system-id'] = params['system_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['basicAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[PoolQosResponse]',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_all_ssc_volumes(self, system_id, **kwargs):
"""
Retrieve the list of volumes
Mode: Both Embedded and Proxy.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_all_ssc_volumes(system_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
:return: list[UserVolume]
If the method is called asynchronously,
returns the request thread.
:raises: ValueError
If the required params are not provided or if the response data format is unknown.
TypeError:
When the data type of response data is different from what we are expecting
ApiException:
Occurs when we get a HTTP error code (422 and above).
"""
all_params = ['system_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_all_ssc_volumes" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'system_id' is set
if ('system_id' not in params) or (params['system_id'] is None):
raise ValueError("Missing the required parameter `system_id` when calling `get_all_ssc_volumes`")
resource_path = '/storage-systems/{system-id}/ssc/volumes'.replace('{format}', 'json')
path_params = {}
if 'system_id' in params:
path_params['system-id'] = params['system_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['basicAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[UserVolume]',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_pool(self, system_id, pool_id, **kwargs):
"""
Retrieve a pool by name or ID
Mode: Both Embedded and Proxy.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_pool(system_id, pool_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
:param str pool_id: (required)
:return: PoolQosResponse
If the method is called asynchronously,
returns the request thread.
:raises: ValueError
If the required params are not provided or if the response data format is unknown.
TypeError:
When the data type of response data is different from what we are expecting
ApiException:
Occurs when we get a HTTP error code (422 and above).
"""
all_params = ['system_id', 'pool_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_pool" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'system_id' is set
if ('system_id' not in params) or (params['system_id'] is None):
raise ValueError("Missing the required parameter `system_id` when calling `get_pool`")
# verify the required parameter 'pool_id' is set
if ('pool_id' not in params) or (params['pool_id'] is None):
raise ValueError("Missing the required parameter `pool_id` when calling `get_pool`")
resource_path = '/storage-systems/{system-id}/ssc/pools/{poolId}'.replace('{format}', 'json')
path_params = {}
if 'system_id' in params:
path_params['system-id'] = params['system_id']
if 'pool_id' in params:
path_params['poolId'] = params['pool_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['basicAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PoolQosResponse',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_ssc_volume(self, system_id, volume_id, **kwargs):
"""
Retrieve a volume by id or label
Mode: Both Embedded and Proxy.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_ssc_volume(system_id, volume_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
:param str volume_id: (required)
:return: UserVolume
If the method is called asynchronously,
returns the request thread.
:raises: ValueError
If the required params are not provided or if the response data format is unknown.
TypeError:
When the data type of response data is different from what we are expecting
ApiException:
Occurs when we get a HTTP error code (422 and above).
"""
all_params = ['system_id', 'volume_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_ssc_volume" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'system_id' is set
if ('system_id' not in params) or (params['system_id'] is None):
raise ValueError("Missing the required parameter `system_id` when calling `get_ssc_volume`")
# verify the required parameter 'volume_id' is set
if ('volume_id' not in params) or (params['volume_id'] is None):
raise ValueError("Missing the required parameter `volume_id` when calling `get_ssc_volume`")
resource_path = '/storage-systems/{system-id}/ssc/volumes/{volumeId}'.replace('{format}', 'json')
path_params = {}
if 'system_id' in params:
path_params['system-id'] = params['system_id']
if 'volume_id' in params:
path_params['volumeId'] = params['volume_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['basicAuth']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserVolume',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def new_ssc_volume(self, system_id, **kwargs):
"""
Define a new volume and configure its QOS parameters
Mode: Both Embedded and Proxy.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.new_ssc_volume(system_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
:param SscVolumeCreateRequest body:
:return: UserVolume
If the method is called asynchronously,
returns the request thread.
:raises: ValueError
If the required params are not provided or if the response data format is unknown.
TypeError:
When the data type of response data is different from what we are expecting
ApiException:
Occurs when we get a HTTP error code (422 and above).
"""
all_params = ['system_id', 'body']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method new_ssc_volume" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'system_id' is set
if ('system_id' not in params) or (params['system_id'] is None):
raise ValueError("Missing the required parameter `system_id` when calling `new_ssc_volume`")
resource_path = '/storage-systems/{system-id}/ssc/volumes'.replace('{format}', 'json')
path_params = {}
if 'system_id' in params:
path_params['system-id'] = params['system_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['basicAuth']
response = self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserVolume',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def remove_ssc_volume(self, system_id, volume_id, **kwargs):
"""
Delete a volume by id or label
Mode: Both Embedded and Proxy.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.remove_ssc_volume(system_id, volume_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
:param str volume_id: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
:raises: ValueError
If the required params are not provided or if the response data format is unknown.
TypeError:
When the data type of response data is different from what we are expecting
ApiException:
Occurs when we get a HTTP error code (422 and above).
"""
all_params = ['system_id', 'volume_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method remove_ssc_volume" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'system_id' is set
if ('system_id' not in params) or (params['system_id'] is None):
raise ValueError("Missing the required parameter `system_id` when calling `remove_ssc_volume`")
# verify the required parameter 'volume_id' is set
if ('volume_id' not in params) or (params['volume_id'] is None):
raise ValueError("Missing the required parameter `volume_id` when calling `remove_ssc_volume`")
resource_path = '/storage-systems/{system-id}/ssc/volumes/{volumeId}'.replace('{format}', 'json')
path_params = {}
if 'system_id' in params:
path_params['system-id'] = params['system_id']
if 'volume_id' in params:
path_params['volumeId'] = params['volume_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['basicAuth']
response = self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def update_ssc_volume(self, system_id, volume_id, **kwargs):
"""
Update an existing volume
Mode: Both Embedded and Proxy.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_ssc_volume(system_id, volume_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str system_id: The unique identifier of the storage-system. This may be the id or the WWN. (required)
:param str volume_id: (required)
:param SscVolumeUpdateRequest body:
:return: UserVolume
If the method is called asynchronously,
returns the request thread.
:raises: ValueError
If the required params are not provided or if the response data format is unknown.
TypeError:
When the data type of response data is different from what we are expecting
ApiException:
Occurs when we get a HTTP error code (422 and above).
"""
all_params = ['system_id', 'volume_id', 'body']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_ssc_volume" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'system_id' is set
if ('system_id' not in params) or (params['system_id'] is None):
raise ValueError("Missing the required parameter `system_id` when calling `update_ssc_volume`")
# verify the required parameter 'volume_id' is set
if ('volume_id' not in params) or (params['volume_id'] is None):
raise ValueError("Missing the required parameter `volume_id` when calling `update_ssc_volume`")
resource_path = '/storage-systems/{system-id}/ssc/volumes/{volumeId}'.replace('{format}', 'json')
path_params = {}
if 'system_id' in params:
path_params['system-id'] = params['system_id']
if 'volume_id' in params:
path_params['volumeId'] = params['volume_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['basicAuth']
response = self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserVolume',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
| 38.216149 | 845 | 0.516253 | 3,024 | 30,764 | 5.094246 | 0.097884 | 0.043622 | 0.025446 | 0.023629 | 0.865693 | 0.849984 | 0.846868 | 0.846868 | 0.830055 | 0.826809 | 0 | 0.001608 | 0.413828 | 30,764 | 804 | 846 | 38.263682 | 0.852604 | 0.331816 | 0 | 0.813665 | 0 | 0 | 0.166676 | 0.019603 | 0 | 0 | 0 | 0 | 0 | 1 | 0.024845 | false | 0 | 0.018634 | 0 | 0.068323 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ffab27ea7c7df1f7066235043ef5c07de314a032 | 143 | py | Python | neural_pipeline/utils/__init__.py | pfriesch/neural-pipeline | 2df4f7467a721b1fbd93f4439086c6dcee5dac2c | [
"MIT"
] | null | null | null | neural_pipeline/utils/__init__.py | pfriesch/neural-pipeline | 2df4f7467a721b1fbd93f4439086c6dcee5dac2c | [
"MIT"
] | null | null | null | neural_pipeline/utils/__init__.py | pfriesch/neural-pipeline | 2df4f7467a721b1fbd93f4439086c6dcee5dac2c | [
"MIT"
] | null | null | null | from .file_structure_manager import FileStructManager, CheckpointsManager
from .utils import dict_recursive_bypass, dict_pair_recursive_bypass
| 47.666667 | 73 | 0.902098 | 17 | 143 | 7.176471 | 0.705882 | 0.245902 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.06993 | 143 | 2 | 74 | 71.5 | 0.917293 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.5 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
ffb9f3e91c6a8822d90ae0c4d8f1c87eee8cce5b | 59,748 | py | Python | tests/unit/test_batch_parser.py | neuro-inc/neuro-flow | 5f793112a63bd27a6cc608ed6bc13e403943d213 | [
"Apache-2.0"
] | 13 | 2020-09-29T17:07:01.000Z | 2021-08-02T02:54:31.000Z | tests/unit/test_batch_parser.py | neuro-inc/neuro-flow | 5f793112a63bd27a6cc608ed6bc13e403943d213 | [
"Apache-2.0"
] | 163 | 2020-09-30T08:50:06.000Z | 2022-03-25T01:04:43.000Z | tests/unit/test_batch_parser.py | neuro-inc/neuro-flow | 5f793112a63bd27a6cc608ed6bc13e403943d213 | [
"Apache-2.0"
] | 1 | 2021-04-14T05:44:34.000Z | 2021-04-14T05:44:34.000Z | import pathlib
import pytest
from yaml.constructor import ConstructorError
from neuro_flow import ast
from neuro_flow.ast import NeedsLevel
from neuro_flow.expr import (
EnableExpr,
IdExpr,
MappingItemsExpr,
OptBashExpr,
OptBoolExpr,
OptIdExpr,
OptIntExpr,
OptLocalPathExpr,
OptRemotePathExpr,
OptStrExpr,
OptTimeDeltaExpr,
PrimitiveExpr,
RemotePathExpr,
SequenceItemsExpr,
SimpleOptIdExpr,
SimpleOptStrExpr,
StrExpr,
URIExpr,
)
from neuro_flow.parser import parse_batch
from neuro_flow.tokenizer import Pos
def test_parse_minimal(assets: pathlib.Path) -> None:
    """Parse ``batch-minimal.yml`` and compare the full AST.

    The config exercises images, volumes, flow defaults and a single task
    with most fields specified.  Every expression node in the expected tree
    carries the same zero position, so one shared ``zero`` placeholder is
    reused throughout (node equality is value-based — the parser builds its
    own ``Pos`` instances).
    """
    workspace = assets
    config_file = workspace / "batch-minimal.yml"
    flow = parse_batch(workspace, config_file)

    # Placeholder position shared by all expression nodes in the expected AST.
    zero = Pos(0, 0, config_file)

    expected_images = {
        "image_a": ast.Image(
            _start=Pos(4, 4, config_file),
            _end=Pos(12, 0, config_file),
            ref=StrExpr(zero, zero, "image:banana"),
            context=OptStrExpr(zero, zero, "dir"),
            dockerfile=OptStrExpr(zero, zero, "dir/Dockerfile"),
            build_args=SequenceItemsExpr(
                [
                    StrExpr(zero, zero, "--arg1"),
                    StrExpr(zero, zero, "val1"),
                    StrExpr(zero, zero, "--arg2=val2"),
                ]
            ),
            env=None,
            volumes=None,
            build_preset=OptStrExpr(zero, zero, None),
            force_rebuild=OptBoolExpr(zero, zero, True),
        )
    }

    expected_volumes = {
        "volume_a": ast.Volume(
            _start=Pos(14, 4, config_file),
            _end=Pos(18, 2, config_file),
            remote=URIExpr(zero, zero, "storage:dir"),
            mount=RemotePathExpr(zero, zero, "/var/dir"),
            local=OptLocalPathExpr(zero, zero, "dir"),
            read_only=OptBoolExpr(zero, zero, True),
        ),
        "volume_b": ast.Volume(
            _start=Pos(19, 4, config_file),
            _end=Pos(21, 0, config_file),
            remote=URIExpr(zero, zero, "storage:other"),
            mount=RemotePathExpr(zero, zero, "/var/other"),
            local=OptLocalPathExpr(zero, zero, None),
            read_only=OptBoolExpr(zero, zero, None),
        ),
    }

    expected_defaults = ast.BatchFlowDefaults(
        _start=Pos(22, 2, config_file),
        _end=Pos(34, 0, config_file),
        _specified_fields={
            "env",
            "fail_fast",
            "volumes",
            "life_span",
            "schedule_timeout",
            "max_parallel",
            "preset",
            "workdir",
            "tags",
        },
        tags=SequenceItemsExpr(
            [
                StrExpr(zero, zero, "tag-a"),
                StrExpr(zero, zero, "tag-b"),
            ]
        ),
        env=MappingItemsExpr(
            {
                "global_a": StrExpr(zero, zero, "val-a"),
                "global_b": StrExpr(zero, zero, "val-b"),
            }
        ),
        volumes=SequenceItemsExpr(
            [
                OptStrExpr(zero, zero, "storage:common:/mnt/common:rw"),
            ]
        ),
        workdir=OptRemotePathExpr(zero, zero, "/global/dir"),
        life_span=OptTimeDeltaExpr(zero, zero, "1d4h"),
        preset=OptStrExpr(zero, zero, "cpu-large"),
        schedule_timeout=OptTimeDeltaExpr(zero, zero, "5d4h3m2s"),
        fail_fast=OptBoolExpr(zero, zero, True),
        max_parallel=OptIntExpr(zero, zero, 10),
        cache=None,
    )

    # Note: ``cmd`` was given via the plain ``cmd:`` key, hence OptStrExpr
    # rather than OptBashExpr.
    expected_task = ast.Task(
        _start=Pos(35, 4, config_file),
        _end=Pos(57, 0, config_file),
        _specified_fields={
            "life_span",
            "http_auth",
            "entrypoint",
            "title",
            "cmd",
            "schedule_timeout",
            "workdir",
            "env",
            "tags",
            "name",
            "preset",
            "image",
            "http_port",
            "pass_config",
            "id",
            "volumes",
        },
        mixins=None,
        id=OptIdExpr(zero, zero, "test_a"),
        title=OptStrExpr(zero, zero, "Batch title"),
        needs=None,
        name=OptStrExpr(zero, zero, "job-name"),
        image=OptStrExpr(zero, zero, "${{ images.image_a.ref }}"),
        preset=OptStrExpr(zero, zero, "cpu-micro"),
        schedule_timeout=OptTimeDeltaExpr(zero, zero, "1d1s"),
        entrypoint=OptStrExpr(zero, zero, "bash"),
        cmd=OptStrExpr(zero, zero, "echo abc"),
        workdir=OptRemotePathExpr(zero, zero, "/local/dir"),
        env=MappingItemsExpr(
            {
                "local_a": StrExpr(zero, zero, "val-1"),
                "local_b": StrExpr(zero, zero, "val-2"),
            }
        ),
        volumes=SequenceItemsExpr(
            [
                OptStrExpr(zero, zero, "${{ volumes.volume_a.ref }}"),
                OptStrExpr(zero, zero, "storage:dir:/var/dir:ro"),
                OptStrExpr(zero, zero, ""),
                OptStrExpr(zero, zero, None),
            ]
        ),
        tags=SequenceItemsExpr(
            [
                StrExpr(zero, zero, "tag-1"),
                StrExpr(zero, zero, "tag-2"),
            ]
        ),
        life_span=OptTimeDeltaExpr(zero, zero, "2h55m"),
        http_port=OptIntExpr(zero, zero, 8080),
        http_auth=OptBoolExpr(zero, zero, False),
        pass_config=OptBoolExpr(zero, zero, True),
        strategy=None,
        cache=None,
        enable=EnableExpr(zero, zero, "${{ success() }}"),
    )

    assert flow == ast.BatchFlow(
        Pos(0, 0, config_file),
        Pos(57, 0, config_file),
        id=SimpleOptIdExpr(zero, zero, None),
        kind=ast.FlowKind.BATCH,
        title=SimpleOptStrExpr(zero, zero, "Global title"),
        life_span=OptTimeDeltaExpr(zero, zero, None),
        params=None,
        images=expected_images,
        volumes=expected_volumes,
        defaults=expected_defaults,
        mixins=None,
        tasks=[expected_task],
    )
def test_parse_seq(assets: pathlib.Path) -> None:
    """Parse ``batch-seq.yml``: two anonymous sequential tasks.

    Both tasks are identical except for their source span and command text,
    so the expected tree is built from a small local factory.  Expression
    nodes all share one zero-position placeholder (node equality is
    value-based — the parser builds its own ``Pos`` instances).
    """
    workspace = assets
    config_file = workspace / "batch-seq.yml"
    flow = parse_batch(workspace, config_file)

    # Placeholder position shared by all expression nodes in the expected AST.
    zero = Pos(0, 0, config_file)

    def seq_task(start: Pos, end: Pos, cmd: str) -> ast.Task:
        # Only image/preset/cmd are set in the YAML; everything else defaults.
        return ast.Task(
            _start=start,
            _end=end,
            _specified_fields={"preset", "cmd", "image"},
            mixins=None,
            id=OptIdExpr(zero, zero, None),
            title=OptStrExpr(zero, zero, None),
            needs=None,
            name=OptStrExpr(zero, zero, None),
            image=OptStrExpr(zero, zero, "ubuntu"),
            preset=OptStrExpr(zero, zero, "cpu-micro"),
            schedule_timeout=OptTimeDeltaExpr(zero, zero, None),
            entrypoint=OptStrExpr(zero, zero, None),
            cmd=OptBashExpr(zero, zero, cmd),
            workdir=OptRemotePathExpr(zero, zero, None),
            env=None,
            volumes=None,
            tags=None,
            life_span=OptTimeDeltaExpr(zero, zero, None),
            http_port=OptIntExpr(zero, zero, None),
            http_auth=OptBoolExpr(zero, zero, None),
            pass_config=OptBoolExpr(zero, zero, None),
            strategy=None,
            cache=None,
            enable=EnableExpr(zero, zero, "${{ success() }}"),
        )

    assert flow == ast.BatchFlow(
        Pos(0, 0, config_file),
        Pos(9, 0, config_file),
        id=SimpleOptIdExpr(zero, zero, None),
        kind=ast.FlowKind.BATCH,
        title=SimpleOptStrExpr(zero, zero, None),
        life_span=OptTimeDeltaExpr(zero, zero, None),
        params=None,
        images=None,
        volumes=None,
        defaults=None,
        mixins=None,
        tasks=[
            seq_task(Pos(2, 4, config_file), Pos(6, 2, config_file), "echo abc"),
            seq_task(Pos(6, 4, config_file), Pos(9, 0, config_file), "echo def"),
        ],
    )
def test_parse_needs(assets: pathlib.Path) -> None:
    """Parse ``batch-needs.yml``: the second task depends on the first.

    The two tasks differ only in span, specified fields, id, ``needs`` and
    command, so the expected tree is built from a parameterized local
    factory.  Expression nodes all share one zero-position placeholder
    (node equality is value-based — the parser builds its own ``Pos``
    instances).
    """
    workspace = assets
    config_file = workspace / "batch-needs.yml"
    flow = parse_batch(workspace, config_file)

    # Placeholder position shared by all expression nodes in the expected AST.
    zero = Pos(0, 0, config_file)

    def needs_task(start, end, fields, task_id, needs, cmd):
        return ast.Task(
            _start=start,
            _end=end,
            _specified_fields=fields,
            mixins=None,
            id=OptIdExpr(zero, zero, task_id),
            title=OptStrExpr(zero, zero, None),
            needs=needs,
            name=OptStrExpr(zero, zero, None),
            image=OptStrExpr(zero, zero, "ubuntu"),
            preset=OptStrExpr(zero, zero, "cpu-micro"),
            schedule_timeout=OptTimeDeltaExpr(zero, zero, None),
            entrypoint=OptStrExpr(zero, zero, None),
            cmd=OptBashExpr(zero, zero, cmd),
            workdir=OptRemotePathExpr(zero, zero, None),
            env=None,
            volumes=None,
            tags=None,
            life_span=OptTimeDeltaExpr(zero, zero, None),
            http_port=OptIntExpr(zero, zero, None),
            http_auth=OptBoolExpr(zero, zero, None),
            pass_config=OptBoolExpr(zero, zero, None),
            strategy=None,
            cache=None,
            enable=EnableExpr(zero, zero, "${{ success() }}"),
        )

    assert flow == ast.BatchFlow(
        Pos(0, 0, config_file),
        Pos(11, 0, config_file),
        id=SimpleOptIdExpr(zero, zero, None),
        kind=ast.FlowKind.BATCH,
        title=SimpleOptStrExpr(zero, zero, None),
        life_span=OptTimeDeltaExpr(zero, zero, None),
        params=None,
        images=None,
        volumes=None,
        defaults=None,
        mixins=None,
        tasks=[
            needs_task(
                Pos(2, 4, config_file),
                Pos(7, 2, config_file),
                {"cmd", "image", "id", "preset"},
                "task_a",
                None,
                "echo abc",
            ),
            needs_task(
                Pos(7, 4, config_file),
                Pos(11, 0, config_file),
                {"needs", "image", "cmd", "preset"},
                None,
                # Plain list form in YAML maps to the COMPLETED needs level.
                {IdExpr(zero, zero, "task_a"): NeedsLevel.COMPLETED},
                "echo def",
            ),
        ],
    )
def test_parse_needs_dict(assets: pathlib.Path) -> None:
workspace = assets
config_file = workspace / "batch-needs-dict.yml"
flow = parse_batch(workspace, config_file)
assert flow == ast.BatchFlow(
Pos(0, 0, config_file),
Pos(12, 0, config_file),
id=SimpleOptIdExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
None,
),
kind=ast.FlowKind.BATCH,
title=SimpleOptStrExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
None,
),
life_span=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
params=None,
images=None,
volumes=None,
defaults=None,
mixins=None,
tasks=[
ast.Task(
_start=Pos(2, 4, config_file),
_end=Pos(7, 2, config_file),
_specified_fields={"preset", "image", "cmd", "id"},
mixins=None,
id=OptIdExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), "task_a"),
title=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
needs=None,
name=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
image=OptStrExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "ubuntu"
),
preset=OptStrExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "cpu-micro"
),
schedule_timeout=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
entrypoint=OptStrExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
cmd=OptBashExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "echo abc"
),
workdir=OptRemotePathExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
env=None,
volumes=None,
tags=None,
life_span=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
http_port=OptIntExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
http_auth=OptBoolExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
pass_config=OptBoolExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
strategy=None,
cache=None,
enable=EnableExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
"${{ success() }}",
),
),
ast.Task(
_start=Pos(7, 4, config_file),
_end=Pos(12, 0, config_file),
_specified_fields={"preset", "image", "cmd", "needs"},
mixins=None,
id=OptIdExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
title=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
needs={
IdExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
"${{ 'task_a' }}",
): NeedsLevel.RUNNING
},
name=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
image=OptStrExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "ubuntu"
),
preset=OptStrExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "cpu-micro"
),
schedule_timeout=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
entrypoint=OptStrExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
cmd=OptBashExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "echo def"
),
workdir=OptRemotePathExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
env=None,
volumes=None,
tags=None,
life_span=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
http_port=OptIntExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
http_auth=OptBoolExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
pass_config=OptBoolExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
strategy=None,
cache=None,
enable=EnableExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
"${{ success() }}",
),
),
],
)
def test_parse_matrix(assets: pathlib.Path) -> None:
workspace = assets
config_file = workspace / "batch-matrix.yml"
flow = parse_batch(workspace, config_file)
assert flow == ast.BatchFlow(
Pos(0, 0, config_file),
Pos(14, 0, config_file),
id=SimpleOptIdExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
None,
),
kind=ast.FlowKind.BATCH,
title=SimpleOptStrExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
None,
),
life_span=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
params=None,
images=None,
volumes=None,
defaults=None,
mixins=None,
tasks=[
ast.Task(
_start=Pos(2, 4, config_file),
_end=Pos(14, 0, config_file),
_specified_fields={"strategy", "image", "cmd"},
mixins=None,
title=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
name=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
image=OptStrExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "ubuntu"
),
preset=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
schedule_timeout=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
entrypoint=OptStrExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
cmd=OptStrExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "echo abc"
),
workdir=OptRemotePathExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
env=None,
volumes=None,
tags=None,
life_span=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
http_port=OptIntExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
http_auth=OptBoolExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
pass_config=OptBoolExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
id=OptIdExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
needs=None,
strategy=ast.Strategy(
_start=Pos(3, 6, config_file),
_end=Pos(12, 4, config_file),
matrix=ast.Matrix(
_start=Pos(4, 8, config_file),
_end=Pos(12, 4, config_file),
products={
"one": [
PrimitiveExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "o1"
),
PrimitiveExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "o2"
),
],
"two": [
PrimitiveExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "t1"
),
PrimitiveExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "t2"
),
],
},
exclude=[
{
"one": PrimitiveExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "o1"
),
"two": PrimitiveExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "t2"
),
}
],
include=[
{
"one": PrimitiveExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "o3"
),
"two": PrimitiveExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "t3"
),
}
],
),
fail_fast=OptBoolExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
max_parallel=OptIntExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
),
cache=None,
enable=EnableExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
"${{ success() }}",
),
)
],
)
def test_parse_matrix_with_strategy(assets: pathlib.Path) -> None:
workspace = assets
config_file = workspace / "batch-matrix-with-strategy.yml"
flow = parse_batch(workspace, config_file)
assert flow == ast.BatchFlow(
Pos(0, 0, config_file),
Pos(28, 0, config_file),
id=SimpleOptIdExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
None,
),
kind=ast.FlowKind.BATCH,
title=SimpleOptStrExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
None,
),
life_span=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
params=None,
images=None,
volumes=None,
defaults=ast.BatchFlowDefaults(
Pos(2, 2, config_file),
Pos(7, 0, config_file),
_specified_fields={"fail_fast", "cache", "max_parallel"},
tags=None,
env=None,
volumes=None,
workdir=OptRemotePathExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
life_span=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
preset=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
schedule_timeout=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
fail_fast=OptBoolExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), True),
max_parallel=OptIntExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), 15),
cache=ast.Cache(
Pos(5, 4, config_file),
Pos(7, 0, config_file),
strategy=ast.CacheStrategy.NONE,
life_span=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "2h30m"
),
),
),
mixins=None,
tasks=[
ast.Task(
_start=Pos(8, 4, config_file),
_end=Pos(25, 2, config_file),
_specified_fields={"image", "strategy", "cmd", "cache"},
mixins=None,
title=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
name=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
image=OptStrExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "ubuntu"
),
preset=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
schedule_timeout=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
entrypoint=OptStrExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
cmd=OptStrExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "echo abc"
),
workdir=OptRemotePathExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
env=None,
volumes=None,
tags=None,
life_span=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
http_port=OptIntExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
http_auth=OptBoolExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
pass_config=OptBoolExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
id=OptIdExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
needs=None,
strategy=ast.Strategy(
Pos(9, 6, config_file),
Pos(20, 4, config_file),
matrix=ast.Matrix(
Pos(10, 8, config_file),
Pos(18, 6, config_file),
products={
"one": [
PrimitiveExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "o1"
),
PrimitiveExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "o2"
),
],
"two": [
PrimitiveExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "t1"
),
PrimitiveExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "t2"
),
],
},
exclude=[
{
"one": PrimitiveExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "o1"
),
"two": PrimitiveExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "t2"
),
}
],
include=[
{
"one": PrimitiveExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "o3"
),
"two": PrimitiveExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "t3"
),
}
],
),
fail_fast=OptBoolExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), False
),
max_parallel=OptIntExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), 5
),
),
cache=ast.Cache(
Pos(21, 6, config_file),
Pos(23, 4, config_file),
strategy=ast.CacheStrategy.DEFAULT,
life_span=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "1h30m"
),
),
enable=EnableExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
"${{ success() }}",
),
),
ast.Task(
Pos(25, 4, config_file),
Pos(28, 0, config_file),
_specified_fields={"id", "image", "cmd"},
mixins=None,
id=OptIdExpr(
Pos(25, 8, config_file), Pos(25, 14, config_file), "simple"
),
needs=None,
strategy=None,
enable=EnableExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "${{ success() }}"
),
cache=None,
title=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
name=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
image=OptStrExpr(
Pos(26, 11, config_file), Pos(26, 17, config_file), "ubuntu"
),
preset=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
schedule_timeout=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
entrypoint=OptStrExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
cmd=OptStrExpr(
Pos(27, 9, config_file), Pos(27, 17, config_file), "echo abc"
),
workdir=OptRemotePathExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
env=None,
volumes=None,
tags=None,
life_span=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
http_port=OptIntExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
http_auth=OptBoolExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
pass_config=OptBoolExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
),
],
)
def test_parse_args(assets: pathlib.Path) -> None:
workspace = assets
config_file = workspace / "batch-params.yml"
flow = parse_batch(workspace, config_file)
assert flow == ast.BatchFlow(
Pos(0, 0, config_file),
Pos(13, 0, config_file),
id=SimpleOptIdExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
None,
),
kind=ast.FlowKind.BATCH,
title=SimpleOptStrExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
None,
),
life_span=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
params={
"arg1": ast.Param(
_start=Pos(2, 8, config_file),
_end=Pos(
2,
12,
config_file,
),
default=OptStrExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
"val1",
),
descr=OptStrExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
None,
),
),
"arg2": ast.Param(
_start=Pos(
4,
4,
config_file,
),
_end=Pos(
6,
0,
config_file,
),
default=OptStrExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
"val2",
),
descr=OptStrExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
"descr2",
),
),
},
images=None,
volumes=None,
defaults=ast.BatchFlowDefaults(
_start=Pos(7, 2, config_file),
_end=Pos(10, 0, config_file),
_specified_fields={"tags"},
tags=SequenceItemsExpr(
[
StrExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
"${{ params.arg1 }}",
),
StrExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
"${{ params.arg2 }}",
),
]
),
env=None,
volumes=None,
workdir=OptRemotePathExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
life_span=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
preset=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
schedule_timeout=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
fail_fast=OptBoolExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
max_parallel=OptIntExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
cache=None,
),
mixins=None,
tasks=[
ast.Task(
_start=Pos(
11,
4,
config_file,
),
_end=Pos(
13,
0,
config_file,
),
_specified_fields={"image", "cmd"},
mixins=None,
title=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
name=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
image=OptStrExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "ubuntu"
),
preset=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
schedule_timeout=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
entrypoint=OptStrExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
cmd=OptStrExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "echo abc"
),
workdir=OptRemotePathExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
env=None,
volumes=None,
tags=None,
life_span=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
http_port=OptIntExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
http_auth=OptBoolExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
pass_config=OptBoolExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
id=OptIdExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
needs=None,
strategy=None,
cache=None,
enable=EnableExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
"${{ success() }}",
),
)
],
)
def test_parse_enable(assets: pathlib.Path) -> None:
workspace = assets
config_file = workspace / "batch-enable.yml"
flow = parse_batch(workspace, config_file)
assert flow == ast.BatchFlow(
Pos(0, 0, config_file),
Pos(11, 0, config_file),
id=SimpleOptIdExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
None,
),
kind=ast.FlowKind.BATCH,
title=SimpleOptStrExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
None,
),
life_span=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
params=None,
images=None,
volumes=None,
defaults=None,
mixins=None,
tasks=[
ast.Task(
_start=Pos(2, 4, config_file),
_end=Pos(6, 2, config_file),
_specified_fields={"cmd", "id", "preset", "image"},
mixins=None,
id=OptIdExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), "task_a"),
title=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
needs=None,
name=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
image=OptStrExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "ubuntu"
),
preset=OptStrExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "cpu-micro"
),
schedule_timeout=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
entrypoint=OptStrExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
cmd=OptBashExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "echo abc"
),
workdir=OptRemotePathExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
env=None,
volumes=None,
tags=None,
life_span=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
http_port=OptIntExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
http_auth=OptBoolExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
pass_config=OptBoolExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
strategy=None,
cache=None,
enable=EnableExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
"${{ success() }}",
),
),
ast.Task(
_start=Pos(6, 4, config_file),
_end=Pos(11, 0, config_file),
_specified_fields={"enable", "image", "needs", "cmd", "preset"},
mixins=None,
id=OptIdExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
title=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
needs={
IdExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "task_a"
): NeedsLevel.COMPLETED
},
name=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
image=OptStrExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "ubuntu"
),
preset=OptStrExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "cpu-micro"
),
schedule_timeout=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
entrypoint=OptStrExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
cmd=OptBashExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "echo abc"
),
workdir=OptRemotePathExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
env=None,
volumes=None,
tags=None,
life_span=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
http_port=OptIntExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
http_auth=OptBoolExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
pass_config=OptBoolExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
strategy=None,
cache=None,
enable=EnableExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
"${{ success() }}",
),
),
],
)
def test_parse_mixin(assets: pathlib.Path) -> None:
workspace = assets
config_file = workspace / "batch-mixin.yml"
flow = parse_batch(workspace, config_file)
assert flow == ast.BatchFlow(
Pos(0, 0, config_file),
Pos(11, 0, config_file),
id=SimpleOptIdExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
None,
),
kind=ast.FlowKind.BATCH,
title=SimpleOptStrExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
None,
),
life_span=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
params=None,
images=None,
volumes=None,
defaults=None,
mixins={
"basic": ast.TaskMixin(
Pos(3, 4, config_file),
Pos(5, 0, config_file),
_specified_fields={"image", "preset"},
mixins=None,
name=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
image=OptStrExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
"ubuntu",
),
preset=OptStrExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "cpu-micro"
),
schedule_timeout=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
entrypoint=OptStrExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
cmd=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
workdir=OptRemotePathExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
env=None,
volumes=None,
tags=None,
life_span=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
title=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
http_port=OptIntExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
http_auth=OptBoolExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
pass_config=OptBoolExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
needs=None,
strategy=None,
cache=None,
enable=EnableExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
"${{ success() }}",
),
),
},
tasks=[
ast.Task(
_start=Pos(6, 4, config_file),
_end=Pos(9, 2, config_file),
_specified_fields={"mixins", "cmd"},
mixins=[
StrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), "basic")
],
id=OptIdExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
title=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
needs=None,
name=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
image=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
preset=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
schedule_timeout=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
entrypoint=OptStrExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
cmd=OptBashExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "echo abc"
),
workdir=OptRemotePathExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
env=None,
volumes=None,
tags=None,
life_span=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
http_port=OptIntExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
http_auth=OptBoolExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
pass_config=OptBoolExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
strategy=None,
cache=None,
enable=EnableExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
"${{ success() }}",
),
),
ast.Task(
_start=Pos(9, 4, config_file),
_end=Pos(11, 0, config_file),
_specified_fields={"mixins", "cmd"},
mixins=[
StrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), "basic")
],
id=OptIdExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
title=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
needs=None,
name=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
image=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
preset=OptStrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), None),
schedule_timeout=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
entrypoint=OptStrExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
cmd=OptBashExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), "echo def"
),
workdir=OptRemotePathExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
env=None,
volumes=None,
tags=None,
life_span=OptTimeDeltaExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
http_port=OptIntExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
http_auth=OptBoolExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
pass_config=OptBoolExpr(
Pos(0, 0, config_file), Pos(0, 0, config_file), None
),
strategy=None,
cache=None,
enable=EnableExpr(
Pos(0, 0, config_file),
Pos(0, 0, config_file),
"${{ success() }}",
),
),
],
)
def test_batch_job_extra_attrs(assets: pathlib.Path) -> None:
workspace = assets
config_file = workspace / "batch-task-extra-attrs.yml"
with pytest.raises(ConstructorError):
parse_batch(workspace, config_file)
def test_batch_action_call_extra_attrs(assets: pathlib.Path) -> None:
workspace = assets
config_file = workspace / "batch-action-call-extra-attrs.yml"
with pytest.raises(ConstructorError):
parse_batch(workspace, config_file)
| 39.05098 | 88 | 0.419361 | 5,834 | 59,748 | 4.118272 | 0.033768 | 0.316324 | 0.31545 | 0.303546 | 0.934945 | 0.91684 | 0.910597 | 0.90144 | 0.897986 | 0.893324 | 0 | 0.049603 | 0.467212 | 59,748 | 1,529 | 89 | 39.076521 | 0.705149 | 0 | 0 | 0.839973 | 0 | 0 | 0.029842 | 0.00236 | 0 | 0 | 0 | 0 | 0.005976 | 1 | 0.007304 | false | 0.011288 | 0.005312 | 0 | 0.012616 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
441bd529e25f0424238d6c48bc677da192807ec7 | 60 | py | Python | vit_pytorch_lightning/__init__.py | makoto-sofue/vit-pytorch-lightning | da8cace2ba06a2d1b277dec9a50ec9cd97b61230 | [
"MIT"
] | null | null | null | vit_pytorch_lightning/__init__.py | makoto-sofue/vit-pytorch-lightning | da8cace2ba06a2d1b277dec9a50ec9cd97b61230 | [
"MIT"
] | null | null | null | vit_pytorch_lightning/__init__.py | makoto-sofue/vit-pytorch-lightning | da8cace2ba06a2d1b277dec9a50ec9cd97b61230 | [
"MIT"
] | null | null | null | from vit_pytorch_lightning.vit_pytorch_lightning import ViT
| 30 | 59 | 0.916667 | 9 | 60 | 5.666667 | 0.555556 | 0.392157 | 0.745098 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.066667 | 60 | 1 | 60 | 60 | 0.910714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
44288ffc2a2a0fb69f4990f6b539028fd68c548f | 5,012 | py | Python | transportmodels/test.py | xianqiu/TransportModels | 40910d71d04c801a72a6f6361444ef5a67f755da | [
"MIT"
] | null | null | null | transportmodels/test.py | xianqiu/TransportModels | 40910d71d04c801a72a6f6361444ef5a67f755da | [
"MIT"
] | 1 | 2020-02-19T10:58:18.000Z | 2020-02-19T10:58:18.000Z | transportmodels/test.py | xianqiu/TransportModels | 40910d71d04c801a72a6f6361444ef5a67f755da | [
"MIT"
] | null | null | null | import unittest
from transportmodels import Transport, TransModel
class TestTransport(unittest.TestCase):
def test_solve1(self):
t = Transport()
t.set_supplies([200, 250])
t.set_demands([100, 150, 200])
t.set_cost_matrix([[90, 70, 100], [80, 65, 75]])
t.solve()
x = [[50.0, 150.0, 0.0], [50.0, 0.0, 200.0]]
v = 34000.0
self.assertEqual(t.get_solution(), x)
self.assertEqual(t.get_objective_value(), v)
def test_solve2(self):
t = Transport()
t.set_supplies([5, 7, 3])
t.set_demands([7, 3, 5])
t.set_cost_matrix([[3, 1, 100], [4, 2, 4], [100, 3, 3]])
t.solve()
x = [[5.0, 0.0, 0.0], [2.0, 3.0, 2.0], [0.0, 0.0, 3.0]]
v = 46.0
self.assertEqual(t.get_solution(), x)
self.assertEqual(t.get_objective_value(), v)
class TestTransModel(unittest.TestCase):
def test_solve1(self):
quota_vectors = [5, 7, 3, -7, -3, -5]
cost_matrix = [[0, 0, 0, 3, 1, 100], [0, 0, 0, 4, 2, 4], [0, 0, 0, 100, 3, 3],
[3, 4, 100, 0, 0, 0], [1, 2, 3, 0, 0, 0], [100, 4, 3, 0, 0, 0]]
t = TransModel()
t.set_quota_vectors(quota_vectors)
t.set_cost_matrix(cost_matrix)
t.solve()
x = [[5.0, 0.0, 0.0, 0.0, 0.0, 0.0], [2.0, 3.0, 2.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 3.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0]]
v = 46.0
self.assertEqual(t.get_solutions(), x)
self.assertEqual(t.get_objective_values(), v)
def test_solve2(self):
quota_vectors = [[5, 7, 3, -7, -3, -5],
[5, -7, 3, 7, -3, -5],
[5, 7, -3, -7, 3, -5],
[-5, 7, 3, -7, -3, 5]
]
cost_matrix = [[0, 0, 0, 3, 1, 100], [0, 0, 0, 4, 2, 4], [0, 0, 0, 100, 3, 3],
[3, 4, 100, 0, 0, 0], [1, 2, 3, 0, 0, 0], [100, 4, 3, 0, 0, 0]]
t = TransModel()
t.set_quota_vectors(quota_vectors)
t.set_cost_matrix(cost_matrix)
t.solve()
x = [[[5.0, 0.0, 0.0, 0.0, 0.0, 0.0], [2.0, 3.0, 2.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 3.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
[[4.0, 0.9999999999999998, 0.0, 0.0, 0.0, 0.0], [3.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 2.0, 5.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
[[0.0, 5.0, 0.0, 0.0, 0.0, 0.0], [3.0, 2.0, 2.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 3.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
[[2.0, 2.0, 3.0, 0.0, 0.0, 0.0], [3.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 5.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0]]]
v = [46.0, 0.9999999999999998, 31.0, 14.0]
self.assertEqual(t.get_solutions(), x)
self.assertEqual(t.get_objective_values(), v)
def test_solve3(self):
quota_vectors = [[5, 7, 3, -7, -3, -5],
[5, -7, 3, 7, -3, -5],
[5, 7, -3, -7, 3, -5],
[-5, 7, 3, -7, -3, 5]
]
cost_matrix = [[0, 0, 0, 3, 1, 100], [0, 0, 0, 4, 2, 4], [0, 0, 0, 100, 3, 3],
[3, 4, 100, 0, 0, 0], [1, 2, 3, 0, 0, 0], [100, 4, 3, 0, 0, 0]]
t = TransModel()
t.set_processors(3)
t.set_quota_vectors(quota_vectors)
t.set_cost_matrix(cost_matrix)
t.solve()
x = [[[5.0, 0.0, 0.0, 0.0, 0.0, 0.0], [2.0, 3.0, 2.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 3.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
[[4.0, 0.9999999999999998, 0.0, 0.0, 0.0, 0.0], [3.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 2.0, 5.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
[[0.0, 5.0, 0.0, 0.0, 0.0, 0.0], [3.0, 2.0, 2.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 3.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
[[2.0, 2.0, 3.0, 0.0, 0.0, 0.0], [3.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 5.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0]]]
v = [46.0, 0.9999999999999998, 31.0, 14.0]
self.assertEqual(t.get_solutions(), x)
self.assertEqual(t.get_objective_values(), v)
if __name__ == '__main__':
unittest.main()
| 45.563636 | 92 | 0.40423 | 1,095 | 5,012 | 1.794521 | 0.054795 | 0.61883 | 0.847328 | 1.030025 | 0.866667 | 0.856997 | 0.801018 | 0.800509 | 0.798473 | 0.78626 | 0 | 0.304491 | 0.333599 | 5,012 | 109 | 93 | 45.981651 | 0.283832 | 0 | 0 | 0.768421 | 0 | 0 | 0.001596 | 0 | 0 | 0 | 0 | 0 | 0.105263 | 1 | 0.052632 | false | 0 | 0.021053 | 0 | 0.094737 | 0 | 0 | 0 | 1 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 13 |
4461509869977241e2b5ced5076096ba3b7f67a8 | 4,290 | py | Python | big_data_prep.py | badmanwillis/Robot-Football-Vision-AI | 7f6d8f6afb559c08e12da0b0405e821cc45b75b1 | [
"MIT"
] | 1 | 2018-08-13T08:01:18.000Z | 2018-08-13T08:01:18.000Z | big_data_prep.py | badmanwillis/Robot-Football-Vision-AI | 7f6d8f6afb559c08e12da0b0405e821cc45b75b1 | [
"MIT"
] | null | null | null | big_data_prep.py | badmanwillis/Robot-Football-Vision-AI | 7f6d8f6afb559c08e12da0b0405e821cc45b75b1 | [
"MIT"
] | null | null | null | '''
Olly Smith 2018
olly.smith1994@gmail.com
This program uses videos of data, stored in folders, to generate an image dataset for training a model. The folders contain videos of data, by class (ball, no_ball). The program will go through each video, and save frames to a folder of that class.
16.4.18
Preparing data for what will hopefully be the last time, barring segmentation implementation.
This works brill, adding named windows would be nice, as well as removing the repeated code.
'''
import cv2
import os
import glob
# INPUT FOLDERS
# Source videos, one directory per class; trailing * is a glob pattern.
Ball_folder = "/media/olly/Storage/Uni/project/may_demo/training_vids/ball_vids/*"
No_Ball_folder = "/media/olly/Storage/Uni/project/may_demo/training_vids/no_ball_vids/*"
# OUTPUT FOLDERS
# Destination directories for the extracted 150x150 train/test frames.
train_ball = "/home/olly/Desktop/may_demo/dataset_5/data/train/ball"
train_no_ball = "/home/olly/Desktop/may_demo/dataset_5/data/train/no_ball"
test_ball = "/home/olly/Desktop/may_demo/dataset_5/data/test/ball"
test_no_ball = "/home/olly/Desktop/may_demo/dataset_5/data/test/no_ball"
SKIP = 4 # every Nth+1 frame to use
SPLIT = 4 # This number determines the train-test split ratio. Eg 4, means for every 4 train images, there will be a test image, ergo a 80/20 train-test split. 3 = 75/25
frameskip = 0 # counts frames since the last saved frame
train_test = 0 # counts train saves since the last test save
trainCount = 0 # used for saving filenames
testCount = 0 # used for saving filenames
# For the ball data (original comment said "no_ball", but this loop reads Ball_folder)
for name in glob.glob(Ball_folder): #'dir/*'
    print name
    cap = cv2.VideoCapture(name)
    while True :
        ret, frame = cap.read()
        if ret == True:
            # ESC to stop
            k = cv2.waitKey(1)
            if k==27:
                cv2.destroyAllWindows()
                break
            # Preview the image
            preview = cv2.resize(frame, (480, 270))
            #cv2.imshow("preview", preview)
            # Lets prepare our dataset
            if frameskip == SKIP: # every Nth+1 frame
                cv2.waitKey(1)
                if train_test == SPLIT:
                    # resize image to network input size
                    test = cv2.resize(frame, (150, 150))
                    #cv2.imshow("save test", test)
                    # Save the frame, inc count
                    cv2.imwrite(os.path.join(test_ball,("ball_%d.jpg" % testCount)), test)
                    print "saved test ball_%d" % testCount
                    testCount +=1
                    train_test = 0 # reset train_test count
                    frameskip = 0
                # NOTE(review): a frame just saved as test also falls through and
                # is saved as train below (no else branch in the original) - confirm intended.
                train = cv2.resize(frame, (150, 150))
                #cv2.imshow("save train", train)
                # Save the frame, inc count
                cv2.imwrite(os.path.join(train_ball,("ball_%d.jpg" %trainCount)), train)
                print "saved train ball_%d" % trainCount
                trainCount +=1
                frameskip = 0
                train_test +=1
            frameskip +=1
        else:
            # cap.read() failed: no more frames in this video.
            cap.release()
            cv2.destroyAllWindows()
            break
    cap.release()
    cv2.destroyAllWindows()
''' Doing exactly the same thing again because it works and i'm lazy '''
# Reset all counters before processing the second class.
frameskip = 0 # counts frames since the last saved frame
train_test = 0 # counts train saves since the last test save
trainCount = 0 # used for saving filenames
testCount = 0 # used for saving filenames
# For the no_ball data (original comment said "ball", but this loop reads No_Ball_folder)
for name in glob.glob(No_Ball_folder): #'dir/*'
    print name
    cap = cv2.VideoCapture(name)
    while True :
        ret, frame = cap.read()
        if ret == True:
            # ESC to stop
            k = cv2.waitKey(1)
            if k==27:
                cv2.destroyAllWindows()
                break
            # Preview the image
            preview = cv2.resize(frame, (480, 270))
            #cv2.imshow("preview", preview)
            # Lets prepare our dataset
            if frameskip == SKIP: # every Nth+1 frame
                cv2.waitKey(1)
                if train_test == SPLIT:
                    # resize image to network input size
                    test = cv2.resize(frame, (150, 150))
                    #cv2.imshow("save test", test)
                    # Save the frame, inc count
                    cv2.imwrite(os.path.join(test_no_ball,("no_ball_%d.jpg" % testCount)), test)
                    print "saved test no_ball_%d" % testCount
                    testCount +=1
                    train_test = 0 # reset train_test count
                    frameskip = 0
                train = cv2.resize(frame, (150, 150))
                #cv2.imshow("save train", train)
                # Save the frame, inc count
                cv2.imwrite(os.path.join(train_no_ball,("no_ball_%d.jpg" %trainCount)), train)
                print "saved train no_ball_%d" % trainCount
                trainCount +=1
                frameskip = 0
                train_test +=1
            frameskip +=1
        else:
            cap.release()
            cv2.destroyAllWindows()
            break
    cap.release()
    cv2.destroyAllWindows()
| 24.514286 | 248 | 0.682984 | 655 | 4,290 | 4.383206 | 0.247328 | 0.031348 | 0.029258 | 0.026472 | 0.751654 | 0.751654 | 0.746082 | 0.728666 | 0.677813 | 0.677813 | 0 | 0.037758 | 0.20979 | 4,290 | 174 | 249 | 24.655172 | 0.809145 | 0.255245 | 0 | 0.752941 | 1 | 0 | 0.186579 | 0.136152 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.035294 | null | null | 0.070588 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
92323e93dc6d1b881d3b0220521d582d67a3ee6b | 33 | py | Python | pywirelessmbus/utils/utils.py | karlTGA/pywmbus | e075f45d4b3b93e54ae05461360aa55b960fc107 | [
"MIT"
] | 3 | 2020-06-04T22:56:50.000Z | 2020-11-06T21:17:28.000Z | pywirelessmbus/utils/utils.py | karlTGA/pywmbus | e075f45d4b3b93e54ae05461360aa55b960fc107 | [
"MIT"
] | null | null | null | pywirelessmbus/utils/utils.py | karlTGA/pywmbus | e075f45d4b3b93e54ae05461360aa55b960fc107 | [
"MIT"
] | 2 | 2020-05-29T13:38:08.000Z | 2020-10-07T19:22:07.000Z | def NOOP(*args):
return None
| 11 | 16 | 0.636364 | 5 | 33 | 4.2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.242424 | 33 | 2 | 17 | 16.5 | 0.84 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | true | 0 | 0 | 0.5 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
92582b8874e12f49409f6eb1d11a549090f00c40 | 2,586 | py | Python | models/vgg16.py | sugaok/my-deep-learning-base | c98c37cf0a5b3cfa5929ebb80aa47bdade4f853f | [
"MIT"
] | 1 | 2019-11-21T12:39:42.000Z | 2019-11-21T12:39:42.000Z | models/vgg16.py | sugaok/my-deep-learning-base | c98c37cf0a5b3cfa5929ebb80aa47bdade4f853f | [
"MIT"
] | null | null | null | models/vgg16.py | sugaok/my-deep-learning-base | c98c37cf0a5b3cfa5929ebb80aa47bdade4f853f | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""TensorFlow Kerasのモデル"""
from tensorflow import keras
def model_vgg16_keras(shape: tuple, classes: int) -> keras.Model:
    """Build the stock VGG16 from Keras Applications, with random weights.

    Very Deep Convolutional Networks for Large-Scale Image Recognition,
    Karen Simonyan, Andrew Zisserman, https://arxiv.org/abs/1409.1556

    Args:
        shape: input tensor shape.
        classes: number of output classes.

    Returns:
        keras.Model: an untrained VGG16 including the classifier head.
    """
    vgg16_config = dict(
        include_top=True,    # keep the fully-connected classifier on top
        weights=None,        # random init, no pretrained weights
        input_tensor=None,
        input_shape=shape,
        pooling=None,
        classes=classes,
    )
    return keras.applications.vgg16.VGG16(**vgg16_config)
def vgg16_1d(shape: tuple, classes: int) -> keras.Model:
    """Build a one-dimensional analogue of VGG16.

    Very Deep Convolutional Networks for Large-Scale Image Recognition,
    Karen Simonyan, Andrew Zisserman, https://arxiv.org/abs/1409.1556

    Args:
        shape: input tensor shape.
        classes: number of output classes.

    Returns:
        keras.Model: the sequential 1-D VGG16 network.
    """
    # (filters, kernel sizes) for each conv block; MaxPool1D(2) after each.
    conv_blocks = (
        (64, (3, 3)),
        (128, (3, 3)),
        (256, (3, 3, 1)),
        (512, (3, 3, 1)),
        (512, (3, 3, 1)),
    )
    stack = [keras.layers.InputLayer(shape)]
    for filters, kernels in conv_blocks:
        for kernel in kernels:
            stack.append(
                keras.layers.Conv1D(filters, kernel, padding='same', activation='relu'))
        stack.append(keras.layers.MaxPool1D(2))
    # Classifier head: two 4096-unit dense layers and a softmax output.
    stack.append(keras.layers.Flatten())
    stack.append(keras.layers.Dense(4096, activation='relu'))
    stack.append(keras.layers.Dense(4096, activation='relu'))
    stack.append(keras.layers.Dense(classes, activation='softmax'))
    return keras.Sequential(stack, name='vgg16_1d')
| 33.584416 | 72 | 0.603635 | 279 | 2,586 | 5.569892 | 0.261649 | 0.162806 | 0.183398 | 0.241313 | 0.799228 | 0.799228 | 0.760618 | 0.760618 | 0.760618 | 0.706564 | 0 | 0.061715 | 0.260634 | 2,586 | 76 | 73 | 34.026316 | 0.751046 | 0.240526 | 0 | 0.527778 | 0 | 0 | 0.071995 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.055556 | false | 0 | 0.027778 | 0 | 0.138889 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
927b61ea19f55500c46461a2caf691387d8fba6a | 122 | py | Python | components/icdc-pidgin/tests/conftest.py | CBIIT/icdc-docker | 5dc78b96a8d885b3fa427c55b9cc19f4771910fa | [
"Apache-2.0"
] | 2 | 2019-06-10T15:30:51.000Z | 2020-01-18T23:24:13.000Z | components/icdc-pidgin/tests/conftest.py | CBIIT/icdc-docker | 5dc78b96a8d885b3fa427c55b9cc19f4771910fa | [
"Apache-2.0"
] | null | null | null | components/icdc-pidgin/tests/conftest.py | CBIIT/icdc-docker | 5dc78b96a8d885b3fa427c55b9cc19f4771910fa | [
"Apache-2.0"
] | 1 | 2022-03-31T09:52:46.000Z | 2022-03-31T09:52:46.000Z | import pytest
from pidgin.app import app as pidgin_app
@pytest.fixture(scope='session')
def app():
    """Session-scoped fixture handing back the pidgin application object."""
    application = pidgin_app
    return application
| 17.428571 | 40 | 0.762295 | 19 | 122 | 4.789474 | 0.578947 | 0.296703 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.147541 | 122 | 6 | 41 | 20.333333 | 0.875 | 0 | 0 | 0 | 0 | 0 | 0.057377 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | true | 0 | 0.4 | 0.2 | 0.8 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
92a26f789966505f8bee1c3039ee12bfd354c6fe | 283 | py | Python | src/mess/test.py | zerohope/OJcodes | 4f140f6e891ab14132cd69faddd52ecddd8ab7dc | [
"MIT"
] | 2 | 2021-03-27T01:17:39.000Z | 2021-03-27T01:18:13.000Z | src/mess/test.py | zerohope/OJcodes | 4f140f6e891ab14132cd69faddd52ecddd8ab7dc | [
"MIT"
] | null | null | null | src/mess/test.py | zerohope/OJcodes | 4f140f6e891ab14132cd69faddd52ecddd8ab7dc | [
"MIT"
] | null | null | null | import torch
# Scratch experiments with torch.softmax, kept commented out for reference.
# a=torch.tensor([[1.,2.,3.],[4.,5.,6.]])
# print(torch.softmax(a,dim=1))
# c = torch.Tensor([[[1,2,3], [4,5,6]], [[7,8,9], [10,11,12]]]) # shape (2,2,3)
# print( torch.softmax(c, dim=2))
# Print the largest softmax probability and its index along dim 1.
print(torch.softmax(torch.tensor([[1.,8.,10.]]), dim=1).max(1))
# print(1e-4) | 35.375 | 82 | 0.551237 | 57 | 283 | 2.736842 | 0.403509 | 0.211538 | 0.230769 | 0.166667 | 0.217949 | 0.217949 | 0.217949 | 0.217949 | 0 | 0 | 0 | 0.133333 | 0.09894 | 283 | 8 | 83 | 35.375 | 0.478431 | 0.681979 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0.5 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 8 |
92b046053d158dc8a79ff2488fa26524075a02ee | 17,190 | py | Python | grid_search.py | SlackEight/DNN-TRND | 89b2f0fe426dbcc4786dc40cbb96ab845b4ad49e | [
"CC0-1.0"
] | 2 | 2021-08-28T10:03:06.000Z | 2021-11-12T04:55:38.000Z | grid_search.py | SlackEight/DNN-TRND | 89b2f0fe426dbcc4786dc40cbb96ab845b4ad49e | [
"CC0-1.0"
] | null | null | null | grid_search.py | SlackEight/DNN-TRND | 89b2f0fe426dbcc4786dc40cbb96ab845b4ad49e | [
"CC0-1.0"
] | null | null | null | import utils.polar_pla as pla
import torch
import models
import progressbar
import time
import utils.dnn_methods as dm
# Pick the compute device once at import time: the first CUDA GPU when one
# is available, otherwise fall back to the CPU.
dev = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
# ---------- DATA PREPROCESSING ---------- #
output_file = 'results.txt'
# filenames of the datasets; max_errors/filter_size/epochs_per_set are
# per-dataset settings aligned index-for-index with file_names.
file_names = ['CTtemp.csv','snp500.csv', 'hpc.csv']#,'hpc.csv', 'sin.csv']
max_errors = [6000, 10, 5000]
filter_size = [5,10,40]
epochs_per_set = [1,1,1]#[1000,50,200]#[2000,100,200]
data_sets = []
# now we need pre-process the data
for i in range(len(file_names)):
    name = file_names[i]
    # read in the time series (one float per line)
    # NOTE(review): the file handle is never closed.
    f = open("DataSets/"+name, 'r')
    time_series = []
    for line in f:
        time_series.append(float(line))
    # apply median filter
    time_series = pla.median_filter(time_series, filter_size[i])
    # apply sliding window piecewise linear segmentation
    x, _ = pla.sliding_window_pla(time_series, max_errors[i])
    data_sets.append(x)
#component = 0
models_to_average = 5  # validation losses are averaged over this many runs
# now we need to define two different set creation methods for CNNs and RNNs
train_proportion = 0.7
# Grid-search each architecture (MLP, CNN, TCN, RNN, LSTM, BiLSTM) over every
# dataset, once per prediction-target component (0, 1, 2), appending the best
# configuration per dataset to output_file.
# NOTE(review): the spelling `hyperparemeters` and the loop variable `set`
# (which shadows the builtin) are preserved from the original.
for component in range(3):
    # ------------ MLP ------------ #
    hyperparemeters = [[32, 64, 128], # number of nodes per hidden layer
                       [0.001], # learning rate
                       [64], # batch size
                       [4,6,8,10], # sequence length
                       [0.0,0.1,0.2,0.3] # dropout
                       ]
    # Total grid size = product of all hyperparameter list lengths x datasets.
    total_models = 1
    for set in hyperparemeters:
        total_models *= len(set)
    total_models *= len(data_sets)
    index = 0
    print("\nStarting MLP grid optimisation...")
    start_time = time.time()
    bar = progressbar.ProgressBar(maxval=total_models+1, \
        widgets=[progressbar.Bar('█', '|', '|'), ' ', progressbar.Percentage()])
    bar.start()
    for i in range(len(data_sets)):
        data_set = data_sets[i]
        lowest_validation = 999999  # sentinel; any real loss is lower
        best_hyper_params = []
        for batch_size in hyperparemeters[2]:
            for seq_length in hyperparemeters[3]:
                # load in data
                trainset, validationset, testset = dm.dataload(dm.sliding_window_MLP, batch_size, data_set, seq_length, train_proportion, component)
                for lr in hyperparemeters[1]:
                    for nodes in hyperparemeters[0]:
                        for dropout in hyperparemeters[4]:
                            index += 1
                            # create the model; average the validation loss
                            # over several freshly initialised models.
                            val_loss = 0
                            for jj in range(models_to_average):
                                model = models.MLP(seq_length*2, nodes, max(1,component), dropout).to(dev)
                                val_loss += dm.train_model(model, trainset, validationset, testset, lr, epochs_per_set[i])/models_to_average
                            bar.update(index)
                            if val_loss < lowest_validation:
                                lowest_validation = val_loss
                                best_hyper_params = [nodes, lr, batch_size, seq_length, dropout]
        # Append the best configuration for this dataset to the results file.
        f = open(output_file, 'a')
        f.write(f'MLP - Lowest validation loss: {lowest_validation} with following hyperparams: \n'
                +f'Nodes: {best_hyper_params[0]}\n'
                +f'LR: {best_hyper_params[1]}\n'
                +f'Batch Size: {best_hyper_params[2]}\n'
                +f'Sequence Length: {best_hyper_params[3]}\n'
                +f'Dropout:{best_hyper_params[4]}\n'
                +f'Dataset: {file_names[i]}')
        f.close()
    bar.finish()
    elapsed = time.time()-start_time
    print(f'MLP optimisation completed, results stored in {output_file}'
          +f'\nModels Considered: {total_models}'
          +f'\nCompletion Time: {int((elapsed/60)//60)}:{int(elapsed//60)%60}:{round(elapsed%60)}s\n\n')
    # ------------ CNN ------------ #
    hyperparemeters = [[32, 64,128], # number of nodes per hidden layer
                       [0.001, 0.01], # learning rate
                       [64], # batch size
                       [4,6,8,10], # sequence length
                       [0.0,0.1,0.2], # dropout
                       ]
    total_models = 1
    for set in hyperparemeters:
        total_models *= len(set)
    total_models *= len(data_sets)
    index = 0
    print("Starting CNN grid optimisation...")
    start_time = time.time()
    bar = progressbar.ProgressBar(maxval=total_models+1, \
        widgets=[progressbar.Bar('█', '|', '|'), ' ', progressbar.Percentage()])
    bar.start()
    # grid optimise
    for i in range(len(data_sets)):
        data_set = data_sets[i]
        lowest_validation = 999999
        best_hyper_params = []
        for batch_size in hyperparemeters[2]:
            for seq_length in hyperparemeters[3]:
                # load in data
                trainset, validationset, testset = dm.dataload(dm.sliding_window_CNN, batch_size, data_set, seq_length,train_proportion, component)
                for lr in hyperparemeters[1]:
                    for nodes in hyperparemeters[0]:
                        for dropout in hyperparemeters[4]:
                            # create the model
                            index+=1
                            bar.update(index)
                            val_loss = 0
                            for jj in range(models_to_average):
                                model = models.CNN(seq_length, nodes, max(1,component), 2, dropout).to(dev)
                                val_loss += dm.train_model(model, trainset, validationset, testset, lr,epochs_per_set[i])/models_to_average
                            if val_loss < lowest_validation:
                                lowest_validation = val_loss
                                best_hyper_params = [nodes, lr, batch_size, seq_length, dropout]
        f = open(output_file, 'a')
        f.write(f'CNN -- Lowest validation loss: {lowest_validation} with following hyperparams: \n'
                +f'Nodes: {best_hyper_params[0]}\n'
                +f'LR: {best_hyper_params[1]}\n'
                +f'Batch Size: {best_hyper_params[2]}\n '
                +f'Sequence Length: {best_hyper_params[3]}\n '
                +f'Dropout:{best_hyper_params[4]}\n'
                +f'Dataset: {file_names[i]}\n\n')
        f.close()
    bar.finish()
    elapsed = time.time()-start_time
    print(f'CNN optimisation completed, results stored in {output_file}'
          +f'\nModels Considered: {total_models}'
          +f'\nCompletion Time: {int((elapsed/60)//60)}:{int(elapsed//60)%60}:{round(elapsed%60)}s\n\n')
    # ------------ TCN ------------ #
    hyperparemeters = [[32, 64,128], # number of nodes per hidden layer
                       [0.001], # learning rate
                       [64], # batch size
                       [4,6,8,10], # sequence length
                       [0.0,0.1,0.2], # dropout
                       [2,4,6]] # kernel size]
    print("Starting TCN grid optimisation...")
    start_time = time.time()
    total_models = 1
    for set in hyperparemeters:
        total_models *= len(set)
    total_models *= len(data_sets)
    index = 0
    bar = progressbar.ProgressBar(maxval=total_models+1, \
        widgets=[progressbar.Bar('█', '|', '|'), ' ', progressbar.Percentage()])
    bar.start()
    for i in range(len(data_sets)):
        data_set = data_sets[i]
        lowest_validation = 999999
        best_hyper_params = []
        for batch_size in hyperparemeters[2]:
            for seq_length in hyperparemeters[3]:
                # load in data (TCN reuses the CNN windowing)
                trainset, validationset, testset = dm.dataload(dm.sliding_window_CNN, batch_size, data_set, seq_length,train_proportion,component)
                for lr in hyperparemeters[1]:
                    for nodes in hyperparemeters[0]:
                        for dropout in hyperparemeters[4]:
                            for kernel_size in hyperparemeters[5]:
                                # create the model
                                index+=1
                                val_loss = 0
                                for av in range(models_to_average):
                                    model = models.TCN(seq_length,max(1, component), [nodes]*3, kernel_size, dropout).to(dev)
                                    val_loss += dm.train_model(model, trainset, validationset, testset, lr, epochs_per_set[i])/models_to_average
                                bar.update(index)
                                if val_loss < lowest_validation:
                                    lowest_validation = val_loss
                                    best_hyper_params = [nodes, lr, batch_size, seq_length, dropout, kernel_size]
        f = open(output_file, 'a')
        f.write(f'TCN -- Lowest validation loss: {lowest_validation} with following hyperparams: \n'
                +f'Nodes: {best_hyper_params[0]}\n'
                +f'LR: {best_hyper_params[1]}\n'
                +f'Batch Size: {best_hyper_params[2]}\n '
                +f'Sequence Length: {best_hyper_params[3]}\n '
                +f'Dropout:{best_hyper_params[4]}\n'
                +f'Kernel Size: {best_hyper_params[5]}\n'
                +f'Dataset: {file_names[i]}')
        f.close()
    bar.finish()
    elapsed = time.time()-start_time
    print(f'TCN optimisation completed, results stored in {output_file}'
          +f'\nModels Considered: {total_models}'
          +f'\nCompletion Time: {int((elapsed/60)//60)}:{int(elapsed//60)%60}:{round(elapsed%60)}s\n\n')
    # ------------ RNN ------------ #
    hyperparemeters = [[64,65], # number of nodes per hidden layer
                       [0.001,0.0001], # learning rate
                       [64], # batch size
                       [4,6,8,10], # sequence length
                       [0.0] # dropout
                       ]
    print("Starting RNN grid optimisation...")
    start_time = time.time()
    total_models = 1
    for set in hyperparemeters:
        total_models *= len(set)
    total_models *= len(data_sets)
    index = 0
    bar = progressbar.ProgressBar(maxval=total_models+1, \
        widgets=[progressbar.Bar('█', '|', '|'), ' ', progressbar.Percentage()])
    bar.start()
    for i in range(len(data_sets)):
        data_set = data_sets[i]
        lowest_validation = 999999
        best_hyper_params = []
        for nodes in hyperparemeters[0]:
            for lr in hyperparemeters[1]:
                for batch_size in hyperparemeters[2]:
                    for seq_length in hyperparemeters[3]:
                        trainset, validationset, testset = dm.dataload(dm.sliding_window_RNN, batch_size, data_set, seq_length, train_proportion, component)
                        for dropout in hyperparemeters[4]:
                            # create the model
                            # NOTE(review): unlike MLP/CNN/TCN, the model is
                            # created once OUTSIDE the averaging loop (the same
                            # instance is retrained models_to_average times, the
                            # sum is not divided, and .to(dev) is not called) -
                            # confirm whether this is intentional.
                            model = models.RNN(max(1,component), 2, nodes, 1, dropout)
                            index+=1
                            bar.update(index)
                            val_loss = 0
                            for jj in range(models_to_average):
                                val_loss += dm.train_model(model, trainset, validationset, testset, lr, epochs_per_set[i])
                            if val_loss < lowest_validation:
                                lowest_validation = val_loss
                                best_hyper_params = [nodes, lr, batch_size, seq_length, dropout]
        f = open(output_file, 'a')
        f.write(f'RNN -- Lowest validation loss: {lowest_validation} with following hyperparams: \n'
                +f'Nodes: {best_hyper_params[0]}\n'
                +f'LR: {best_hyper_params[1]}\n'
                +f'Batch Size: {best_hyper_params[2]}\n '
                +f'Sequence Length: {best_hyper_params[3]}\n '
                +f'Dropout:{best_hyper_params[4]}\n\n')
        f.close()
    bar.finish()
    elapsed = time.time()-start_time
    print(f'RNN optimisation completed, results stored in {output_file}'
          +f'\nModels Considered: {total_models}'
          +f'\nCompletion Time: {int((elapsed/60)//60)}:{int(elapsed//60)%60}:{round(elapsed%60)}s\n\n')
    # ------------ LSTM ------------ #
    hyperparemeters = [[64,65], # number of nodes per hidden layer
                       [0.001,0.0001], # learning rate
                       [64], # batch size
                       [4,6,8,10], # sequence length
                       [0.0] # dropout
                       ]
    print("Starting LSTM grid optimisation...")
    start_time = time.time()
    total_models = 1
    for set in hyperparemeters:
        total_models *= len(set)
    total_models *= len(data_sets)
    index = 0
    bar = progressbar.ProgressBar(maxval=total_models+1, \
        widgets=[progressbar.Bar('█', '|', '|'), ' ', progressbar.Percentage()])
    bar.start()
    for i in range(len(data_sets)):
        data_set = data_sets[i]
        lowest_validation = 999999
        best_hyper_params = []
        for nodes in hyperparemeters[0]:
            for lr in hyperparemeters[1]:
                for batch_size in hyperparemeters[2]:
                    for seq_length in hyperparemeters[3]:
                        trainset, validationset, testset = dm.dataload(dm.sliding_window_RNN, batch_size, data_set, seq_length,train_proportion, component)
                        for dropout in hyperparemeters[4]:
                            # create the model (same model-reuse caveat as RNN)
                            model = models.LSTM(max(1,component), 2, nodes, 1, dropout)
                            index+=1
                            bar.update(index)
                            val_loss = 0
                            for jj in range(models_to_average):
                                val_loss += dm.train_model(model, trainset, validationset, testset, lr, epochs_per_set[i])
                            if val_loss < lowest_validation:
                                lowest_validation = val_loss
                                best_hyper_params = [nodes, lr, batch_size, seq_length, dropout]
        f = open(output_file, 'a')
        f.write(f'LSTM -- Lowest validation loss: {lowest_validation} with following hyperparams: \n'
                +f'Nodes: {best_hyper_params[0]}\n'
                +f'LR: {best_hyper_params[1]}\n'
                +f'Batch Size: {best_hyper_params[2]}\n '
                +f'Sequence Length: {best_hyper_params[3]}\n '
                +f'Dropout:{best_hyper_params[4]}\n\n')
        f.close()
    bar.finish()
    elapsed = time.time()-start_time
    print(f'LSTM optimisation completed, results stored in {output_file}'
          +f'\nModels Considered: {total_models}'
          +f'\nCompletion Time: {int((elapsed/60)//60)}:{int(elapsed//60)%60}:{round(elapsed%60)}s\n\n')
    # ------------ Bi-LSTM ------------ #
    # NOTE: reuses the LSTM hyperparameter grid defined above.
    print("Starting BiLSTM grid optimisation...")
    start_time = time.time()
    total_models = 1
    for set in hyperparemeters:
        total_models *= len(set)
    total_models *= len(data_sets)
    index = 0
    bar = progressbar.ProgressBar(maxval=total_models+1, \
        widgets=[progressbar.Bar('█', '|', '|'), ' ', progressbar.Percentage()])
    bar.start()
    for i in range(len(data_sets)):
        data_set = data_sets[i]
        lowest_validation = 999999
        best_hyper_params = []
        for nodes in hyperparemeters[0]:
            for lr in hyperparemeters[1]:
                for batch_size in hyperparemeters[2]:
                    for seq_length in hyperparemeters[3]:
                        trainset, validationset, testset = dm.dataload(dm.sliding_window_RNN, batch_size, data_set, seq_length,train_proportion, component)
                        for dropout in hyperparemeters[4]:
                            # create the model (same model-reuse caveat as RNN)
                            model = models.BiLSTM(max(1,component), 2, nodes, 1, dropout)
                            index+=1
                            bar.update(index)
                            val_loss = 0
                            for jj in range(models_to_average):
                                val_loss += dm.train_model(model, trainset, validationset, testset, lr, epochs_per_set[i])
                            if val_loss < lowest_validation:
                                lowest_validation = val_loss
                                best_hyper_params = [nodes, lr, batch_size, seq_length, dropout]
        f = open(output_file, 'a')
        f.write(f'BiLSTM -- Lowest validation loss: {lowest_validation} with following hyperparams: \n'
                +f'Nodes: {best_hyper_params[0]}\n'
                +f'LR: {best_hyper_params[1]}\n'
                +f'Batch Size: {best_hyper_params[2]}\n '
                +f'Sequence Length: {best_hyper_params[3]}\n '
                +f'Dropout:{best_hyper_params[4]}\n')
        f.close()
    bar.finish()
    elapsed = time.time()-start_time
    print(f'BiLSTM optimisation completed, results stored in {output_file}'
          +f'\nModels Considered: {total_models}'
          +f'\nCompletion Time: {int((elapsed/60)//60)}:{int(elapsed//60)%60}:{round(elapsed%60)}s\n\n')
| 43.964194 | 156 | 0.536766 | 1,980 | 17,190 | 4.489394 | 0.090909 | 0.043537 | 0.072562 | 0.0189 | 0.871977 | 0.868152 | 0.868152 | 0.864439 | 0.860277 | 0.860164 | 0 | 0.032962 | 0.34171 | 17,190 | 390 | 157 | 44.076923 | 0.752033 | 0.06434 | 0 | 0.790476 | 0 | 0.019048 | 0.188304 | 0.075896 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.019048 | 0 | 0.019048 | 0.038095 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
2b8204d07894a8388ba88f2d886f5c5b9ed7ff22 | 4,009 | py | Python | tests/v1/nodes.py | tombry/virlutils | e98136b4e88c456828f2d0496c14f851f2627a46 | [
"MIT"
] | 133 | 2018-07-01T06:08:49.000Z | 2022-03-26T15:22:21.000Z | tests/v1/nodes.py | tombry/virlutils | e98136b4e88c456828f2d0496c14f851f2627a46 | [
"MIT"
] | 76 | 2018-06-28T16:41:57.000Z | 2022-03-26T17:23:06.000Z | tests/v1/nodes.py | tombry/virlutils | e98136b4e88c456828f2d0496c14f851f2627a46 | [
"MIT"
] | 43 | 2018-06-27T20:40:52.000Z | 2022-02-22T06:16:11.000Z | from . import BaseTest
from click.testing import CliRunner
import requests_mock
from virl.cli.main import virl
class NodesTest(BaseTest):
    """Tests for the `virl nodes` CLI command against a mocked roster API."""

    @staticmethod
    def _roster_entry(status, node_name, mgmt_ip, console, monitor, subtype):
        """Build one roster entry; all field values match the original fixture."""
        return {
            "Status": status,
            "simLaunch": "2018-04-04T14:25:12.916689",
            "PortConsole": console,
            "NodeName": node_name,
            "simExpires": None,
            "managementIP": mgmt_ip,
            "SerialPorts": 2,
            "SimulationHost": "5.5.5.5",
            "NodeSubtype": subtype,
            "simStatus": "ACTIVE",
            "Reachable": True,
            "PortMonitor": monitor,
            "managementProtocol": "telnet",
            "managementProxy": "jumphost",
            "VncConsole": False,
            "simID": "TEST_ENV",
            "Annotation": "REACHABLE",
        }

    def mock_response(self):
        """Return a canned /roster/rest/ payload describing four nodes.

        NOTE: router3 and mgmt-lxc deliberately reuse router2's NodeName,
        IP and ports, exactly as in the original fixture data.
        """
        return {
            "guest|TEST_ENV|virl|router1": self._roster_entry(
                "ACTIVE", "router1", "1.1.1.1", 17000, 17001, "IOSv"),
            "guest|TEST_ENV|virl|router2": self._roster_entry(
                "BUILDING", "router2", "2.2.2.2", 17003, 17004, "IOSv"),
            "guest|TEST_ENV|virl|router3": self._roster_entry(
                "", "router2", "2.2.2.2", 17003, 17004, "IOSv"),
            "guest|TEST_ENV|virl|mgmt-lxc": self._roster_entry(
                "", "router2", "2.2.2.2", 17003, 17004, "LXC FLAT"),
        }

    def test_virl_nodes(self):
        """`virl nodes` exits 0 when the roster API responds."""
        with requests_mock.mock() as mocked:
            # Mock the request to return what we expect from the API.
            mocked.get('http://localhost:19399/roster/rest/',
                       json=self.mock_response())
            outcome = CliRunner().invoke(virl, ["nodes"])
            self.assertEqual(0, outcome.exit_code)

    def test_virl_nodes_in_env(self):
        """`virl nodes <env>` also exits 0 against the mocked roster."""
        with requests_mock.mock() as mocked:
            # Mock the request to return what we expect from the API.
            mocked.get('http://localhost:19399/roster/rest/',
                       json=self.mock_response())
            outcome = CliRunner().invoke(virl, ["nodes", "foo"])
            self.assertEqual(0, outcome.exit_code)
| 36.779817 | 69 | 0.458967 | 328 | 4,009 | 5.536585 | 0.280488 | 0.013216 | 0.013216 | 0.035242 | 0.81663 | 0.81663 | 0.78359 | 0.78359 | 0.761013 | 0.761013 | 0 | 0.073591 | 0.406835 | 4,009 | 108 | 70 | 37.12037 | 0.690076 | 0.027688 | 0 | 0.717172 | 0 | 0 | 0.328113 | 0.054685 | 0 | 0 | 0 | 0 | 0.020202 | 1 | 0.030303 | false | 0 | 0.040404 | 0 | 0.090909 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
92199d11366c2c64089ade5fac76eacee023eb34 | 392 | py | Python | devops/infrastructure/settings.py | mserrano-dev/DevOps | dcd801160ce3d51c47794d67885057d7d715697c | [
"Apache-2.0"
] | null | null | null | devops/infrastructure/settings.py | mserrano-dev/DevOps | dcd801160ce3d51c47794d67885057d7d715697c | [
"Apache-2.0"
] | null | null | null | devops/infrastructure/settings.py | mserrano-dev/DevOps | dcd801160ce3d51c47794d67885057d7d715697c | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
from util import project_fs
from util.array_phpfill import *
# ============================================================================ #
# Amazon Simple Storage Service (S3) Bucket
# ============================================================================ #
def get_mserrano_config():
    """Load the mserrano AWS config JSON, resolved relative to the user's home."""
    config_path = '.aws/mserrano.config'
    return project_fs.read_json(config_path, rel_to_user_home=True)
| 39.2 | 80 | 0.461735 | 36 | 392 | 4.777778 | 0.805556 | 0.093023 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.002778 | 0.081633 | 392 | 9 | 81 | 43.555556 | 0.475 | 0.543367 | 0 | 0 | 0 | 0 | 0.116279 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | true | 0 | 0.5 | 0.25 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
a666c6dbe62728aafcde5bbba93d36451d7d71ea | 105 | py | Python | txt2image/misc.py | axju/txt2image | 1e78695d402f97f58fc53fae2ce58803cb1d0586 | [
"MIT"
] | 1 | 2020-05-03T04:03:57.000Z | 2020-05-03T04:03:57.000Z | txt2image/misc.py | axju/txt2image | 1e78695d402f97f58fc53fae2ce58803cb1d0586 | [
"MIT"
] | null | null | null | txt2image/misc.py | axju/txt2image | 1e78695d402f97f58fc53fae2ce58803cb1d0586 | [
"MIT"
] | null | null | null | from random import randrange
def random_color():
    """Return a uniformly random RGB color as a tuple of three ints in [0, 255].

    Fixes an off-by-one: the original used randrange(0, 255), whose upper
    bound is exclusive, so a channel value of 255 (full intensity) was
    unreachable. randrange(256) covers the full 0-255 range.
    """
    return tuple(randrange(256) for _ in range(3))
| 21 | 54 | 0.72381 | 17 | 105 | 4.411765 | 0.882353 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.056818 | 0.161905 | 105 | 4 | 55 | 26.25 | 0.795455 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
a673490828d76d97c893ce944c9b77731dbb2398 | 208 | py | Python | vega/algorithms/nas/auto_lane/__init__.py | wnov/vega | bf51cbe389d41033c4ae4bc02e5078c3c247c845 | [
"MIT"
] | 6 | 2020-11-13T15:44:47.000Z | 2021-12-02T08:14:06.000Z | vega/algorithms/nas/auto_lane/__init__.py | JacobLee121/vega | 19256aca4d047bfad3b461f0a927e1c2abb9eb03 | [
"MIT"
] | null | null | null | vega/algorithms/nas/auto_lane/__init__.py | JacobLee121/vega | 19256aca4d047bfad3b461f0a927e1c2abb9eb03 | [
"MIT"
] | 2 | 2021-06-25T09:42:32.000Z | 2021-08-06T18:00:09.000Z | from .auto_lane_nas_algorithm import AutoLaneNas
from .auto_lane_nas_codec import AutoLaneNasCodec
import vega
# The trainer callback depends on torch, so import it only when the torch
# backend is active; other backends skip it.
if vega.is_torch_backend():
    from .auto_lane_trainer_callback import AutoLaneTrainerCallback
| 29.714286 | 67 | 0.865385 | 28 | 208 | 6.035714 | 0.607143 | 0.142012 | 0.213018 | 0.177515 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.100962 | 208 | 6 | 68 | 34.666667 | 0.903743 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.8 | 0 | 0.8 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
a6a97e4fa4a72509d28196d8174dcc98ed876f5f | 125 | py | Python | Sorting/insertion.py | Krylovsentry/Algorithms | 0cd236f04dc065d5247a6f274bb3db503db591b0 | [
"MIT"
] | 1 | 2016-08-21T13:01:42.000Z | 2016-08-21T13:01:42.000Z | Sorting/insertion.py | Krylovsentry/Algorithms | 0cd236f04dc065d5247a6f274bb3db503db591b0 | [
"MIT"
] | null | null | null | Sorting/insertion.py | Krylovsentry/Algorithms | 0cd236f04dc065d5247a6f274bb3db503db591b0 | [
"MIT"
] | null | null | null | # O(n ** 2)
def insertion_sort(alist):
    """Sort `alist` in place using insertion sort; return None.

    The original body was an unimplemented stub (`pass`). This implements
    the standard O(n^2) algorithm: grow a sorted prefix, shifting larger
    elements right to insert each new key. Stable; handles empty and
    single-element lists trivially.
    """
    for i in range(1, len(alist)):
        key = alist[i]
        j = i - 1
        # Shift elements of the sorted prefix that are greater than key.
        while j >= 0 and alist[j] > key:
            alist[j + 1] = alist[j]
            j -= 1
        alist[j + 1] = key
print([1, 2, 3, 4, 5])
print([1, 3, 99, 21, 23, 18, 44, 12, 21, 144, 441])
| 15.625 | 51 | 0.528 | 26 | 125 | 2.5 | 0.807692 | 0.184615 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.291667 | 0.232 | 125 | 7 | 52 | 17.857143 | 0.385417 | 0.072 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0.25 | 0 | 0 | 0.25 | 0.5 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 7 |
a6d3d65eef481f4428c31ab8b45e668334a43105 | 114 | py | Python | tests/matchers/whois/test_rules.py | t4d/abuse_whois | 32c897da124f6404199484a7395c0f96a7a344f8 | [
"MIT"
] | 6 | 2021-11-25T05:50:36.000Z | 2021-12-11T11:00:58.000Z | tests/matchers/whois/test_rules.py | t4d/abuse_whois | 32c897da124f6404199484a7395c0f96a7a344f8 | [
"MIT"
] | 1 | 2021-12-10T04:28:30.000Z | 2021-12-10T21:09:55.000Z | tests/matchers/whois/test_rules.py | t4d/abuse_whois | 32c897da124f6404199484a7395c0f96a7a344f8 | [
"MIT"
] | 1 | 2021-12-11T11:01:28.000Z | 2021-12-11T11:01:28.000Z | from abuse_whois.matchers.whois.rules import load_rules
def test_load_rules():
    """The bundled whois matcher rule set should contain at least one rule."""
    rules = load_rules()
    assert len(rules) > 0
| 19 | 55 | 0.763158 | 18 | 114 | 4.555556 | 0.666667 | 0.329268 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010204 | 0.140351 | 114 | 5 | 56 | 22.8 | 0.826531 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.333333 | 1 | 0.333333 | true | 0 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
47235e1d82ae5e5843dd91ccfaf04b80f48dfd9b | 2,056 | py | Python | sip/test.py | obnauticus/tandbergProxy | 543580ec6d14422a030014437dfe38093851e645 | [
"Apache-2.0"
] | null | null | null | sip/test.py | obnauticus/tandbergProxy | 543580ec6d14422a030014437dfe38093851e645 | [
"Apache-2.0"
] | null | null | null | sip/test.py | obnauticus/tandbergProxy | 543580ec6d14422a030014437dfe38093851e645 | [
"Apache-2.0"
] | null | null | null | import sip
import media
import time
import sys
import server
def test_server():
    """Run the proxy server against the fixed test endpoint configuration."""
    endpoint = {
        'alias': 'test',
        'rh': '10.0.4.234',
        'rp': 5060,
        'lh': '10.0.4.43',
        'lp': 5060,
        'transport': 'tcp',
    }
    server.run(endpoint)
def test_re_invite():
    """Place a call, renegotiate it with a re-INVITE, run media, hang up.

    NOTE(review): assumes a SIP endpoint is reachable at 10.0.4.234:5060
    and that this host owns 10.0.4.43 — confirm before running.
    """
    params={'alias':'test',
        'rh':'10.0.4.234',
        'rp':5060,
        'lh':'10.0.4.43',
        'lp':5060,
        'transport':'tcp',
        }
    sip.send_invite(params)
    res = sip.get_invite_response(params)
    # Any final response above 299 means call setup failed; abort the test.
    if (res[0] > 299):
        sys.exit(1)
    sip.send_ack(params)
    print('in call...')
    time.sleep(1)
    # Renegotiate the established dialog with a second SDP offer.
    sip.send_re_invite(params, sip.sdp2(params))
    res = sip.get_invite_response(params)
    sip.send_ack(params)
    if (res[0] == 200):
        # res[2] presumably carries the answered SDP body — feed it to the
        # gstreamer media pipeline and let media flow for 30 seconds.
        sdp = res[2]
        p = media.gst(media.parse_sdp(sdp))
        time.sleep(30)
        p.kill()
    sip.send_bye(params)
    time.sleep(1)
def test_call_media():
    """Establish a call and run the media pipeline for 30 seconds."""
    endpoint = {
        'alias': 'test',
        'rh': '10.0.4.234',
        'rp': 5060,
        'lh': '10.0.4.43',
        'lp': 5060,
        'transport': 'tcp',
    }
    sip.send_invite(endpoint)
    response = sip.get_invite_response(endpoint)
    if response[0] > 299:
        # Call setup failed; abort with a non-zero exit status.
        sys.exit(1)
    sip.send_ack(endpoint)
    print('in call...')
    # response[2] holds the answered SDP; drive the gstreamer pipeline with it.
    pipeline = media.gst(media.parse_sdp(response[2]))
    time.sleep(30)
    pipeline.kill()
    sip.send_bye(endpoint)
    time.sleep(1)
def test_call2():
    """Signalling-only call: invite, hold the call for 10 seconds, hang up."""
    endpoint = {
        'alias': 'test',
        'rh': '10.0.4.234',
        'rp': 5060,
        'lh': '10.0.4.43',
        'lp': 5060,
        'transport': 'tcp',
    }
    sip.send_invite(endpoint)
    response = sip.get_invite_response(endpoint)
    if response[0] > 299:
        # Call setup failed; abort with a non-zero exit status.
        sys.exit(1)
    time.sleep(1)
    sip.send_ack(endpoint)
    print('in call...')
    time.sleep(10)
    sip.send_bye(endpoint)
    time.sleep(1)
# Entry point selection: exactly one scenario runs; uncomment to switch.
#test_server()
test_call_media()
#test_call2()
| 22.844444 | 49 | 0.476167 | 264 | 2,056 | 3.587121 | 0.185606 | 0.081309 | 0.033791 | 0.071806 | 0.801478 | 0.783527 | 0.783527 | 0.719113 | 0.719113 | 0.719113 | 0 | 0.089879 | 0.356031 | 2,056 | 89 | 50 | 23.101124 | 0.625378 | 0.01216 | 0 | 0.769231 | 0 | 0 | 0.114433 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.051282 | false | 0 | 0.064103 | 0 | 0.115385 | 0.038462 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5b2e84c6a9d998b2c7ef4714b16ace4b63ed3ee3 | 133 | py | Python | shaner/aggregater/__init__.py | takato86/shaner | e200bfaf27c69aab4a76ab3d589a2bbda4538ca4 | [
"MIT"
] | null | null | null | shaner/aggregater/__init__.py | takato86/shaner | e200bfaf27c69aab4a76ab3d589a2bbda4538ca4 | [
"MIT"
] | null | null | null | shaner/aggregater/__init__.py | takato86/shaner | e200bfaf27c69aab4a76ab3d589a2bbda4538ca4 | [
"MIT"
] | null | null | null | from shaner.aggregater.core import *
from shaner.aggregater.entity.achiever import *
from shaner.aggregater.entity.splitter import *
| 33.25 | 47 | 0.827068 | 17 | 133 | 6.470588 | 0.470588 | 0.272727 | 0.545455 | 0.472727 | 0.581818 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.090226 | 133 | 3 | 48 | 44.333333 | 0.909091 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
5bbd75f18312c4f52a30803bfd32030d32e46afc | 156 | py | Python | prices/site/views.py | andresfredes/GW2_prices | 1d9222b0d2d5b60c36d0a6305443e4cbcfcd2362 | [
"BSD-3-Clause"
] | null | null | null | prices/site/views.py | andresfredes/GW2_prices | 1d9222b0d2d5b60c36d0a6305443e4cbcfcd2362 | [
"BSD-3-Clause"
] | null | null | null | prices/site/views.py | andresfredes/GW2_prices | 1d9222b0d2d5b60c36d0a6305443e4cbcfcd2362 | [
"BSD-3-Clause"
] | null | null | null | from flask import render_template
from prices.site import site
@site.route('/')
@site.route('/index')
def index():
return render_template('index.html') | 22.285714 | 40 | 0.737179 | 22 | 156 | 5.136364 | 0.545455 | 0.247788 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.115385 | 156 | 7 | 40 | 22.285714 | 0.818841 | 0 | 0 | 0 | 0 | 0 | 0.10828 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | true | 0 | 0.333333 | 0.166667 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
5bc5337bcd8d26e65b8056dfc722a39596f57562 | 4,471 | py | Python | dataloaders.py | kumarak93/LinearConv | 2cd9945c769c16206fd380c6b23b76d44102805f | [
"MIT"
] | 7 | 2020-09-05T14:44:40.000Z | 2021-02-21T08:43:14.000Z | dataloaders.py | kkahatapitiya/LinearConv | 2cd9945c769c16206fd380c6b23b76d44102805f | [
"MIT"
] | 2 | 2020-09-21T06:22:54.000Z | 2021-09-29T06:14:42.000Z | dataloaders.py | kumarak93/LinearConv | 2cd9945c769c16206fd380c6b23b76d44102805f | [
"MIT"
] | 2 | 2021-01-06T12:50:35.000Z | 2021-01-29T12:59:57.000Z | import torch
import torchvision
import torchvision.transforms as transforms
# Replace with the corresponding dataloader
#
# NOTE(review): `root` and `batch_size` are expected to be defined by the
# script this snippet is pasted into — confirm before running.  Each section
# builds train/test transforms, datasets, loaders, and pulls one sample batch.
#
# Fix: Python 3 iterators expose __next__, not a .next() method, so
# `dataiter.next()` raises AttributeError with modern PyTorch; every section
# now uses the builtin next(dataiter).
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%#
# CIFAR-10
transform_train = transforms.Compose([
    transforms.RandomCrop(32, padding=4),
    transforms.RandomHorizontalFlip(),
    transforms.ToTensor(),
    transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
])
transform_test = transforms.Compose([
    transforms.ToTensor(),
    transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
])
trainset = torchvision.datasets.CIFAR10(
    root=root, train=True, download=True, transform=transform_train)
trainloader = torch.utils.data.DataLoader(
    trainset, batch_size=batch_size, shuffle=True, num_workers=2)
testset = torchvision.datasets.CIFAR10(
    root=root, train=False, download=True, transform=transform_test)
testloader = torch.utils.data.DataLoader(
    testset, batch_size=batch_size, shuffle=False, num_workers=2)
dataiter = iter(trainloader)
images, labels = next(dataiter)
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%#
# CIFAR-100
transform_train = transforms.Compose([
    transforms.RandomCrop(32, padding=4),
    transforms.RandomHorizontalFlip(),
    transforms.ToTensor(),
    transforms.Normalize((0.5071, 0.4866, 0.4409), (0.2009, 0.1984, 0.2023)),
])
transform_test = transforms.Compose([
    transforms.ToTensor(),
    transforms.Normalize((0.5071, 0.4866, 0.4409), (0.2009, 0.1984, 0.2023)),
])
trainset = torchvision.datasets.CIFAR100(
    root=root, train=True, download=True, transform=transform_train)
trainloader = torch.utils.data.DataLoader(
    trainset, batch_size=batch_size, shuffle=True, num_workers=2)
testset = torchvision.datasets.CIFAR100(
    root=root, train=False, download=True, transform=transform_test)
testloader = torch.utils.data.DataLoader(
    testset, batch_size=batch_size, shuffle=False, num_workers=2)
dataiter = iter(trainloader)
images, labels = next(dataiter)
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%#
# Fashion-MNIST
# Pad(2) turns the native 28x28 images into 32x32.
transform_train = transforms.Compose([
    transforms.Pad(2),
    transforms.RandomHorizontalFlip(),
    transforms.ToTensor(),
    transforms.Normalize((0.2860,), (0.3205,)),
])
transform_test = transforms.Compose([
    transforms.Pad(2),
    transforms.ToTensor(),
    transforms.Normalize((0.2860,), (0.3205,)),
])
trainset = torchvision.datasets.FashionMNIST(
    root=root, train=True, download=True, transform=transform_train)
trainloader = torch.utils.data.DataLoader(
    trainset, batch_size=batch_size, shuffle=True, num_workers=2)
testset = torchvision.datasets.FashionMNIST(
    root=root, train=False, download=True, transform=transform_test)
testloader = torch.utils.data.DataLoader(
    testset, batch_size=batch_size, shuffle=False, num_workers=2)
dataiter = iter(trainloader)
images, labels = next(dataiter)
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%#
# MNIST
# No augmentation: the same deterministic transform serves train and test.
transform = transforms.Compose([
    transforms.Pad(2),
    transforms.ToTensor(),
    transforms.Normalize((0.1307,), (0.3015,)),
])
trainset = torchvision.datasets.MNIST(
    root=root, train=True, download=True, transform=transform)
trainloader = torch.utils.data.DataLoader(
    trainset, batch_size=batch_size, shuffle=True, num_workers=2)
testset = torchvision.datasets.MNIST(
    root=root, train=False, download=True, transform=transform)
testloader = torch.utils.data.DataLoader(
    testset, batch_size=batch_size, shuffle=False, num_workers=2)
dataiter = iter(trainloader)
images, labels = next(dataiter)
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%#
# SVHN
# SVHN uses split='train'/'test' instead of the train= boolean.
transform_train = transforms.Compose([
    transforms.RandomCrop(32, padding=4),
    transforms.ToTensor(),
    transforms.Normalize((0.4377, 0.4438, 0.4728), (0.1201, 0.1231, 0.1052)),
])
transform_test = transforms.Compose([
    transforms.ToTensor(),
    transforms.Normalize((0.4377, 0.4438, 0.4728), (0.1201, 0.1231, 0.1052)),
])
trainset = torchvision.datasets.SVHN(
    root=root, split='train', download=True, transform=transform_train)
trainloader = torch.utils.data.DataLoader(
    trainset, batch_size=batch_size, shuffle=True, num_workers=2)
testset = torchvision.datasets.SVHN(
    root=root, split='test', download=True, transform=transform_test)
testloader = torch.utils.data.DataLoader(
    testset, batch_size=batch_size, shuffle=False, num_workers=2)
dataiter = iter(trainloader)
images, labels = next(dataiter)
| 44.71 | 109 | 0.699843 | 520 | 4,471 | 5.928846 | 0.146154 | 0.058385 | 0.068115 | 0.097308 | 0.932858 | 0.9241 | 0.851443 | 0.841713 | 0.792734 | 0.763542 | 0 | 0.060643 | 0.089018 | 4,471 | 99 | 110 | 45.161616 | 0.696293 | 0.098412 | 0 | 0.782609 | 0 | 0 | 0.002242 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.043478 | 0 | 0.043478 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5bfd5e476c99840e5733da7cff676b1be96eed72 | 93,322 | py | Python | tests/test_query_builder.py | repole/drowsy | 1360068c52d4ef4fbb7bcb2db2e4a5ce9c3c7294 | [
"MIT"
] | 4 | 2016-06-16T20:16:38.000Z | 2020-08-18T19:51:40.000Z | tests/test_query_builder.py | repole/drowsy | 1360068c52d4ef4fbb7bcb2db2e4a5ce9c3c7294 | [
"MIT"
] | 2 | 2017-10-05T11:14:39.000Z | 2018-04-15T00:54:35.000Z | tests/test_query_builder.py | repole/drowsy | 1360068c52d4ef4fbb7bcb2db2e4a5ce9c3c7294 | [
"MIT"
] | null | null | null | """
tests.test_query_builder
~~~~~~~~~~~~~~~~~~~~~~~~
Query builder tests for Drowsy.
"""
# :copyright: (c) 2016-2020 by Nicholas Repole and contributors.
# See AUTHORS for more details.
# :license: MIT - See LICENSE for more details.
from pytest import raises
from sqlalchemy.inspection import inspect
from drowsy.exc import BadRequestError
from drowsy.query_builder import QueryBuilder, ModelResourceQueryBuilder
from drowsy.parser import SubfilterInfo, SortInfo
from tests.base import DrowsyDatabaseTests
from tests.models import (
Album, CompositeOne, CompositeNode, Customer, Employee, Playlist, Track
)
from tests.resources import (
AlbumResource, TrackResource, CompositeNodeResource,
CompositeOneResource, CustomerResource, EmployeeResource,
PlaylistResource
)
class TestDrowsyQueryBuilder(DrowsyDatabaseTests):
"""Test drowsy query building across all databases."""
@staticmethod
def test_apply_sorts_simple(db_session):
"""Test applying a single sort."""
query_builder = QueryBuilder()
query = db_session.query(Album)
query = query_builder.apply_sorts(
query=query,
sorts=[SortInfo(attr="album_id", direction="ASC")]
)
results = query.all()
last_album_id = -1
for result in results:
assert result.album_id >= last_album_id
last_album_id = result.album_id
@staticmethod
def test_apply_sorts_fail(db_session):
"""Test applying a single sort."""
query_builder = QueryBuilder()
query = db_session.query(Album)
with raises(AttributeError):
query_builder.apply_sorts(
query=query,
sorts=[SortInfo(attr="badattr", direction="ASC")]
)
@staticmethod
def test_apply_limit_negative_limit_fail(db_session):
"""Test that a negative limit fails."""
query_builder = QueryBuilder()
query = db_session.query(Album)
with raises(ValueError):
query_builder.apply_limit(
query=query,
limit=-1)
@staticmethod
def test_apply_offset_negative_offset_fail(db_session):
"""Test that a negative offset fails."""
query_builder = QueryBuilder()
query = db_session.query(Album)
with raises(ValueError):
query_builder.apply_offset(
query=query,
offset=-1)
@staticmethod
def test_simple_subfilter(db_session):
"""Test applying a simple subfilter."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Album)
subfilters = {
"tracks": SubfilterInfo(
filters={"track_id": 5}
)
}
query = query_builder.apply_subquery_loads(
query=query,
resource=AlbumResource(session=db_session),
subfilters=subfilters,
embeds=[]
)
albums = query.all()
for album in albums:
assert len(album.tracks) <= 1
if album.tracks:
assert album.tracks[0].track_id == 5
@staticmethod
def test_apply_sorts_bad_query(db_session):
"""Test applying sorts with a bad query fails."""
query_builder = QueryBuilder()
query = db_session.query(Album, Track)
with raises(ValueError):
query_builder.apply_sorts(
query,
sorts=[])
@staticmethod
def test_subfilter_sorts_no_limit_offset_fail(db_session):
"""Check that subresource sorts without limit or offset fail."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Album)
subfilters = {
"tracks": SubfilterInfo(
sorts=[SortInfo(attr="track_id", direction="ASC")]
)
}
with raises(BadRequestError) as excinf:
query_builder.apply_subquery_loads(
query=query,
resource=AlbumResource(
session=db_session),
subfilters=subfilters,
embeds=[],
dialect_override=True
)
assert excinf.value.code == "invalid_subresource_sorts"
@staticmethod
def test_simple_subfilter_limit_too_big(db_session):
"""Check that a limit too large on subresource fails."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Customer)
subfilters = {
"invoices": SubfilterInfo(
offset=1,
limit=10000
)
}
with raises(BadRequestError) as excinf:
query_builder.apply_subquery_loads(
query=query,
resource=CustomerResource(
session=db_session),
subfilters=subfilters,
embeds=[],
dialect_override=True
)
assert excinf.value.code == "invalid_subresource_limit"
@staticmethod
def test_subfilter_invalid_fail(db_session):
"""Check that bad subresource filters fail."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Album)
subfilters = {
"tracks": SubfilterInfo(
filters={"track_id": {"$bad": 5}}
)
}
with raises(BadRequestError) as excinf:
query_builder.apply_subquery_loads(
query=query,
resource=AlbumResource(
session=db_session),
subfilters=subfilters,
embeds=[],
dialect_override=True
)
assert excinf.value.code == "filters_field_op_error"
@staticmethod
def test_subfilter_invalid_ignore(db_session):
"""Check that non strict bad subresource filters is ignored."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Album)
subfilters = {
"tracks": SubfilterInfo(
filters={"track_id": {"$bad": 5}}
)
}
query = query_builder.apply_subquery_loads(
query=query,
resource=AlbumResource(
session=db_session),
subfilters=subfilters,
embeds=[],
dialect_override=False,
strict=False
)
result = query.all()
assert len(result) > 0
@staticmethod
def test_subfilter_invalid_limit_fail(db_session):
"""Check subresource query with an invalid root limit fails."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Album)
subfilters = {
"tracks": SubfilterInfo(
sorts=[SortInfo(attr="track_id", direction="ASC")]
)
}
with raises(BadRequestError) as excinf:
query_builder.apply_subquery_loads(
query=query,
resource=AlbumResource(
session=db_session),
subfilters=subfilters,
embeds=[],
dialect_override=True,
limit=-1
)
assert excinf.value.code == "invalid_limit_value"
@staticmethod
def test_subfilter_invalid_limit_ignore(db_session):
"""Check subresource query with invalid root limit ignored."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Album)
subfilters = {
"tracks": SubfilterInfo(
sorts=[SortInfo(attr="track_id", direction="ASC")]
)
}
query = query_builder.apply_subquery_loads(
query=query,
resource=AlbumResource(
session=db_session),
subfilters=subfilters,
embeds=[],
dialect_override=True,
limit=-1,
strict=False
)
assert query is not None
@staticmethod
def test_subfilter_invalid_sort_fail(db_session):
"""Check subresource query with an invalid root sort fails."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Album)
subfilters = {
"tracks": SubfilterInfo(
sorts=[SortInfo(attr="track_id")]
)
}
with raises(BadRequestError) as excinf:
query_builder.apply_subquery_loads(
query=query,
resource=AlbumResource(
session=db_session),
sorts=[SortInfo(attr="TEST")],
subfilters=subfilters,
embeds=[],
dialect_override=True
)
assert excinf.value.code == "invalid_sort_field"
@staticmethod
def test_subfilter_invalid_sort_ignore(db_session):
"""Check subresource query with invalid root sort is ignored."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Album)
subfilters = {
"tracks": SubfilterInfo(
sorts=[SortInfo(attr="track_id")]
)
}
query = query_builder.apply_subquery_loads(
query=query,
resource=AlbumResource(
session=db_session),
subfilters=subfilters,
sorts=[SortInfo(attr="TEST")],
embeds=[],
dialect_override=True,
strict=False
)
assert query is not None
@staticmethod
def test_subfilter_root_sort(db_session):
"""Check subresource query with root sort works."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Album)
subfilters = {
"tracks": SubfilterInfo(
sorts=[SortInfo(attr="track_id")]
)
}
query = query_builder.apply_subquery_loads(
query=query,
resource=AlbumResource(
session=db_session),
subfilters=subfilters,
sorts=[SortInfo(attr="album_id", direction="DESC")],
embeds=[],
dialect_override=True,
strict=False
)
result = query.first()
assert result.album_id == 347
@staticmethod
def test_subfilter_invalid_offset_fail(db_session):
"""Check subresource query with an invalid root offset fails."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Album)
subfilters = {
"tracks": SubfilterInfo(
sorts=[SortInfo(attr="track_id", direction="ASC")]
)
}
with raises(BadRequestError) as excinf:
query_builder.apply_subquery_loads(
query=query,
resource=AlbumResource(
session=db_session),
subfilters=subfilters,
embeds=[],
dialect_override=True,
offset=-1
)
assert excinf.value.code == "invalid_offset_value"
@staticmethod
def test_subfilter_invalid_offset_ignore(db_session):
"""Check subresource query with invalid root offset ignored."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Album)
subfilters = {
"tracks": SubfilterInfo(
sorts=[SortInfo(attr="track_id", direction="ASC")]
)
}
query = query_builder.apply_subquery_loads(
query=query,
resource=AlbumResource(
session=db_session),
subfilters=subfilters,
embeds=[],
dialect_override=True,
offset=-1,
strict=False
)
assert query is not None
@staticmethod
def test_many_to_one_limit_fail(db_session):
"""Test a limit/offset on a many to one relationship fails."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Track)
subfilters = {
"album": SubfilterInfo(
offset=1,
limit=None
)
}
with raises(BadRequestError) as excinf:
query_builder.apply_subquery_loads(
query=query,
resource=TrackResource(
session=db_session),
subfilters=subfilters,
embeds=[],
dialect_override=True
)
assert excinf.value.code == "invalid_subresource_options"
@staticmethod
def test_subquery_embeds(db_session):
"""Test that a simple subquery can work alongside an embed."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Album)
subfilters = {
"artist": SubfilterInfo(
filters={"artist_id": 1}
)
}
query = query_builder.apply_subquery_loads(
query=query,
resource=AlbumResource(session=db_session),
subfilters=subfilters,
embeds=["tracks", "artist"]
)
albums = query.all()
for album in albums:
res = inspect(album)
assert "tracks" not in res.unloaded
if album.artist:
assert album.artist.artist_id == 1
@staticmethod
def test_same_subquery_embeds(db_session):
"""Test that a simple subquery works with a duplicate embed."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Album)
subfilters = {
"tracks": SubfilterInfo(
filters={"track_id": 1}
)
}
query = query_builder.apply_subquery_loads(
query=query,
resource=AlbumResource(session=db_session),
subfilters=subfilters,
embeds=["tracks"]
)
albums = query.all()
for album in albums:
res = inspect(album)
assert "tracks" not in res.unloaded
if album.tracks:
assert len(album.tracks) == 1
assert album.tracks[0].track_id == 1
@staticmethod
def test_simple_embeds(db_session):
"""Test that a simple embed works."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Album)
query = query_builder.apply_subquery_loads(
query=query,
resource=AlbumResource(session=db_session),
subfilters={},
embeds=["tracks"]
)
albums = query.all()
for album in albums:
res = inspect(album)
assert "tracks" not in res.unloaded
@staticmethod
def test_property_embeds(db_session):
"""Test that property embed works."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Album)
query = query_builder.apply_subquery_loads(
query=query,
resource=AlbumResource(session=db_session),
subfilters={},
embeds=["tracks.track_id"]
)
albums = query.all()
for album in albums:
res = inspect(album)
assert "tracks" not in res.unloaded
@staticmethod
def test_bad_embeds(db_session):
"""Test that a bad property embed fails."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Album)
with raises(BadRequestError) as excinf:
query_builder.apply_subquery_loads(
query=query,
resource=AlbumResource(session=db_session),
subfilters={},
embeds=["tracks.track_id.playlistId"]
)
assert excinf.value.code == "invalid_embed"
@staticmethod
def test_bad_embeds_ignore(db_session):
"""Test that a non strict bad property embed is ignored."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Album)
query = query_builder.apply_subquery_loads(
query=query,
resource=AlbumResource(session=db_session),
subfilters={},
embeds=["tracks.track_id.playlistId"],
strict=False
)
result = query.all()
assert len(result) > 0
@staticmethod
def test_too_complex(db_session):
"""Test that an overly complex query fails."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Album)
with raises(BadRequestError) as excinf:
query_builder.build(
query=query,
resource=AlbumResource(session=db_session),
filters={"tracks.track_id": 5},
subfilters={},
stack_size_limit=1
)
assert excinf.value.code == "filters_too_complex"
@staticmethod
def test_no_op_error_message(db_session):
"""Test that filters trigger an error message with no $op."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Album)
with raises(BadRequestError) as excinf:
query_builder.build(
query=query,
resource=AlbumResource(session=db_session),
filters={"tracks": {}},
subfilters={},
stack_size_limit=1
)
assert excinf.value.code == "filters_field_error"
@staticmethod
def test_bad_subfilters(db_session):
"""Test that a bad property subfilter fails."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Album)
with raises(BadRequestError) as excinf:
query_builder.apply_subquery_loads(
query=query,
resource=AlbumResource(session=db_session),
subfilters={
"tracks.track_id": SubfilterInfo(
filters={"track_id": 5}
)
},
embeds=[]
)
assert excinf.value.code == "invalid_subresource"
@staticmethod
def test_bad_subfilters_value(db_session):
"""Test that a bad property subfilter value fails."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Album)
with raises(ValueError):
query_builder.apply_subquery_loads(
query=query,
resource=AlbumResource(session=db_session),
subfilters={
"tracks.track_id": "test"
},
embeds=[]
)
@staticmethod
def test_non_strict_bad_subfilters(db_session):
"""Test bad subfitlers don't cause failure when not strict."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Album)
query = query_builder.apply_subquery_loads(
query=query,
resource=AlbumResource(session=db_session),
subfilters={
"tracks.track_id": SubfilterInfo(
filters={"track_id": 5}
)
},
embeds=[],
strict=False
)
albums = query.all()
assert len(albums) == 347
# TODO - review whether we want this to not load subresource
# for album in albums:
# res = inspect(album)
# assert "tracks" in res.unloaded)
@staticmethod
def test_whitelist_fail(db_session):
"""Test a missing whitelist key causes permission error."""
from drowsy.exc import PermissionDeniedError
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Employee)
resource = EmployeeResource(session=db_session)
with raises(PermissionDeniedError) as excinf:
query_builder.apply_subquery_loads(
query=query,
resource=resource,
subfilters={
"customers": SubfilterInfo(
filters={"phone": 5}
)
},
embeds=[],
strict=True
)
assert excinf.value.code == "filters_permission_error"
@staticmethod
def test_self_referential_composite_id_subquery(db_session):
"""Test a self referential, composite id subquery"""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(CompositeNode)
subfilters = {
"children": SubfilterInfo(
filters={"node_id": 1}
)
}
query = query_builder.apply_subquery_loads(
query=query,
resource=CompositeNodeResource(session=db_session),
subfilters=subfilters,
embeds=[]
)
composite_nodes = query.all()
for composite_node in composite_nodes:
res = inspect(composite_node)
assert "children" not in res.unloaded
for child in composite_node.children:
assert child.node_id == 1
@staticmethod
def test_root_composite_id_limit_with_subquery(db_session):
"""Limit to a composite id root resource using subqueries."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(CompositeNode)
subfilters = {
"children": SubfilterInfo(
filters={"node_id": 6}
)
}
query = query_builder.apply_subquery_loads(
query=query,
resource=CompositeNodeResource(session=db_session),
subfilters=subfilters,
limit=1,
offset=0,
embeds=[]
)
result = query.all()
assert result is not None
assert len(result) == 1
assert result[0].node_id == 1
assert len(result[0].children) == 1
assert result[0].children[0].node_id == 6
@staticmethod
def test_root_limit_with_subquery(db_session):
"""Test applying a limit to a root resource using subqueries."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Album)
subfilters = {
"tracks": SubfilterInfo(
filters={"track_id": 2}
)
}
query = query_builder.apply_subquery_loads(
query=query,
resource=AlbumResource(session=db_session),
subfilters=subfilters,
limit=1,
offset=1,
embeds=[]
)
result = query.all()
assert result is not None
assert len(result) == 1
assert result[0].album_id == 2
assert len(result[0].tracks) == 1
assert result[0].tracks[0].track_id == 2
@staticmethod
def test_circular_relationship_fails(db_session):
"""Referencing the same relationship multiple times fails."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Playlist)
subfilters = {
"tracks": SubfilterInfo(
filters={"track_id": {"$gte": 5}},
limit=5,
sorts=[SortInfo(attr="name", direction="ASC")]
),
"tracks.playlists": SubfilterInfo(
filters={"playlist_id": {"$lte": 6}},
limit=4,
sorts=[SortInfo(attr="name", direction="ASC")]
),
"tracks.playlists.tracks": SubfilterInfo(
filters={"track_id": {"$gte": 5}},
limit=5,
sorts=[SortInfo(attr="name", direction="ASC")]
),
}
with raises(BadRequestError) as excinf:
query_builder.apply_subquery_loads(
query=query,
resource=PlaylistResource(session=db_session),
subfilters=subfilters,
embeds=[],
limit=3,
dialect_override=True
)
assert excinf.value.code == "invalid_subresource_multi_embed"
@staticmethod
def test_bad_subfilter_with_limit(db_session):
"""Test bad subfilter using a limit fails."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Album)
subfilters = {
"tracks": SubfilterInfo(
filters={"track_id": {"$bad": 5}},
limit=1
)
}
with raises(BadRequestError) as excinf:
query_builder.apply_subquery_loads(
query=query,
resource=AlbumResource(session=db_session),
subfilters=subfilters,
embeds=[],
dialect_override=True,
strict=True
)
assert excinf.value.code == "filters_field_op_error"
class TestDrowsyQueryBuilderSqlite(DrowsyDatabaseTests):
"""Sqlite specific query builder tests."""
backends = ['sqlite']
@staticmethod
def test_root_limit_with_subquery(db_session):
"""Apply limit to root resource with subqueries & no row_num."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Album)
subfilters = {
"tracks": SubfilterInfo(
filters={"track_id": 2}
)
}
query = query_builder.apply_subquery_loads(
query=query,
resource=AlbumResource(session=db_session),
subfilters=subfilters,
limit=1,
offset=1,
embeds=[],
dialect_override=False
)
result = query.all()
assert result is not None
assert len(result) == 1
assert result[0].album_id == 2
assert len(result[0].tracks) == 1
assert result[0].tracks[0].track_id == 2
@staticmethod
def test_root_composite_id_limit_with_subquery(db_session):
"""Limit composite id root using subqueries without row_num."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(CompositeNode)
subfilters = {
"children": SubfilterInfo(
filters={"node_id": 6}
)
}
query = query_builder.apply_subquery_loads(
query=query,
resource=CompositeNodeResource(session=db_session),
subfilters=subfilters,
limit=1,
offset=0,
embeds=[],
dialect_override=False
)
result = query.all()
assert result is not None
assert len(result) == 1
assert result[0].node_id == 1
assert len(result[0].children) == 1
assert result[0].children[0].node_id == 6
@staticmethod
def test_root_and_nested_limit_offset(db_session):
"""Test offset and limit in both root and nested collections."""
query_builder = ModelResourceQueryBuilder()
query = db_session.query(Album)
subfilters = {
"tracks": SubfilterInfo(
filters={"track_id": {"$gte": 15}},
offset=1,
limit=1
)
}
query = query_builder.apply_subquery_loads(
query=query,
resource=AlbumResource(session=db_session),
subfilters=subfilters,
embeds=[],
limit=10,
offset=1,
dialect_override=True
)
expected_query = (
"""
SELECT
anon_1."Album_AlbumId" AS "anon_1_Album_AlbumId",
anon_1."Album_Title" AS "anon_1_Album_Title",
anon_1."Album_ArtistId" AS "anon_1_Album_ArtistId",
"Track1"."Track1_TrackId" AS "Track1_Track1_TrackId",
"Track1"."Track1_Name" AS "Track1_Track1_Name",
"Track1"."Track1_AlbumId" AS "Track1_Track1_AlbumId",
"Track1"."Track1_MediaTypeId" AS "Track1_Track1_MediaTypeId",
"Track1"."Track1_GenreId" AS "Track1_Track1_GenreId",
"Track1"."Track1_Composer" AS "Track1_Track1_Composer",
"Track1"."Track1_Milliseconds" AS "Track1_Track1_Milliseconds",
"Track1"."Track1_Bytes" AS "Track1_Track1_Bytes",
"Track1"."Track1_UnitPrice" AS "Track1_Track1_UnitPrice"
FROM
(
SELECT
"Album"."AlbumId" AS "Album_AlbumId",
"Album"."Title" AS "Album_Title",
"Album"."ArtistId" AS "Album_ArtistId",
row_number() OVER (
ORDER BY "Album"."AlbumId" ASC) AS row_number
FROM
"Album"
) AS anon_1
LEFT OUTER JOIN
(
SELECT
q1."Track1_TrackId" AS "Track1_TrackId",
q1."Track1_Name" AS "Track1_Name",
q1."Track1_AlbumId" AS "Track1_AlbumId",
q1."Track1_MediaTypeId" AS "Track1_MediaTypeId",
q1."Track1_GenreId" AS "Track1_GenreId",
q1."Track1_Composer" AS "Track1_Composer",
q1."Track1_Milliseconds" AS "Track1_Milliseconds",
q1."Track1_Bytes" AS "Track1_Bytes",
q1."Track1_UnitPrice" AS "Track1_UnitPrice",
q1.row_number AS row_number
FROM
(
SELECT
"Track1"."TrackId" AS "Track1_TrackId",
"Track1"."Name" AS "Track1_Name",
"Track1"."AlbumId" AS "Track1_AlbumId",
"Track1"."MediaTypeId" AS "Track1_MediaTypeId",
"Track1"."GenreId" AS "Track1_GenreId",
"Track1"."Composer" AS "Track1_Composer",
"Track1"."Milliseconds" AS
"Track1_Milliseconds",
"Track1"."Bytes" AS "Track1_Bytes",
"Track1"."UnitPrice" AS "Track1_UnitPrice",
row_number() OVER (
PARTITION BY "Track1"."AlbumId"
ORDER BY "Track1"."TrackId" ASC
) AS row_number
FROM
"Track" AS "Track1"
WHERE
"Track1"."TrackId" >= ?
) AS q1
WHERE
q1.row_number >= ?
AND
q1.row_number <= ?
) AS "Track1" ON
anon_1."Album_AlbumId" = "Track1"."Track1_AlbumId"
WHERE
anon_1.row_number >= ?
AND
anon_1.row_number <= ?
ORDER BY
anon_1.row_number
"""
).replace(" ", "").replace("\n", "")
result = str(query).replace(" ", "").replace("\n", "")
assert expected_query == result
@staticmethod
def test_simple_subfilter_limit_offset(db_session):
    """Test offset and limit in a subresource.

    The ``tracks`` subresource is filtered, offset by 1, and limited
    to one row per album via a ``row_number()`` window partitioned on
    ``AlbumId``.
    """
    query_builder = ModelResourceQueryBuilder()
    query = db_session.query(Album)
    subfilters = {
        "tracks": SubfilterInfo(
            filters={"track_id": {"$gte": 5}},
            offset=1,
            limit=1
        )
    }
    # dialect_override forces row_number based pagination SQL to be
    # generated even if the test db dialect wouldn't support it.
    query = query_builder.apply_subquery_loads(
        query=query,
        resource=AlbumResource(session=db_session),
        subfilters=subfilters,
        embeds=[],
        dialect_override=True
    )
    # Expected SQL. All whitespace is stripped from both sides before
    # comparing, so only the token sequence matters, not layout.
    expected_query = (
        """
SELECT
"Album"."AlbumId" AS "Album_AlbumId",
"Album"."Title" AS "Album_Title",
"Album"."ArtistId" AS "Album_ArtistId",
"Track1"."Track1_TrackId" AS "Track1_Track1_TrackId",
"Track1"."Track1_Name" AS "Track1_Track1_Name",
"Track1"."Track1_AlbumId" AS "Track1_Track1_AlbumId",
"Track1"."Track1_MediaTypeId" AS "Track1_Track1_MediaTypeId",
"Track1"."Track1_GenreId" AS "Track1_Track1_GenreId",
"Track1"."Track1_Composer" AS "Track1_Track1_Composer",
"Track1"."Track1_Milliseconds" AS "Track1_Track1_Milliseconds",
"Track1"."Track1_Bytes" AS "Track1_Track1_Bytes",
"Track1"."Track1_UnitPrice" AS "Track1_Track1_UnitPrice"
FROM
"Album"
LEFT OUTER JOIN (
SELECT
q1."Track1_TrackId" AS "Track1_TrackId",
q1."Track1_Name" AS "Track1_Name",
q1."Track1_AlbumId" AS "Track1_AlbumId",
q1."Track1_MediaTypeId" AS "Track1_MediaTypeId",
q1."Track1_GenreId" AS "Track1_GenreId",
q1."Track1_Composer" AS "Track1_Composer",
q1."Track1_Milliseconds" AS "Track1_Milliseconds",
q1."Track1_Bytes" AS "Track1_Bytes",
q1."Track1_UnitPrice" AS "Track1_UnitPrice",
q1.row_number AS row_number
FROM
(
SELECT
"Track1"."TrackId" AS "Track1_TrackId",
"Track1"."Name" AS "Track1_Name",
"Track1"."AlbumId" AS "Track1_AlbumId",
"Track1"."MediaTypeId" AS "Track1_MediaTypeId",
"Track1"."GenreId" AS "Track1_GenreId",
"Track1"."Composer" AS "Track1_Composer",
"Track1"."Milliseconds" AS
"Track1_Milliseconds",
"Track1"."Bytes" AS "Track1_Bytes",
"Track1"."UnitPrice" AS "Track1_UnitPrice",
row_number() OVER (
PARTITION BY "Track1"."AlbumId"
ORDER BY "Track1"."TrackId" ASC
) AS row_number
FROM
"Track" AS "Track1"
WHERE
"Track1"."TrackId" >= ?
) AS q1
WHERE
q1.row_number >= ?
AND
q1.row_number <= ?
) AS "Track1" ON "Album"."AlbumId" = "Track1"."Track1_AlbumId"
ORDER BY
"Album"."AlbumId" ASC
"""
    ).replace(" ", "").replace("\n", "")
    result = str(query).replace(" ", "").replace("\n", "")
    assert expected_query == result
@staticmethod
def test_subfilter_limit_offset_sorts(db_session):
    """Test subfiltering with sorts works with limit and offset.

    Same as the simple limit/offset test, but the subresource sort on
    ``name`` should drive the window's ORDER BY clause.
    """
    query_builder = ModelResourceQueryBuilder()
    query = db_session.query(Album)
    subfilters = {
        "tracks": SubfilterInfo(
            filters={"track_id": {"$gte": 5}},
            offset=1,
            limit=1,
            sorts=[SortInfo(attr="name", direction="ASC")]
        )
    }
    # dialect_override forces row_number based pagination SQL to be
    # generated even if the test db dialect wouldn't support it.
    query = query_builder.apply_subquery_loads(
        query=query,
        resource=AlbumResource(session=db_session),
        subfilters=subfilters,
        embeds=[],
        dialect_override=True
    )
    # Expected SQL; whitespace-insensitive comparison below.
    expected_query = (
        """
SELECT
"Album"."AlbumId" AS "Album_AlbumId",
"Album"."Title" AS "Album_Title",
"Album"."ArtistId" AS "Album_ArtistId",
"Track1"."Track1_TrackId" AS "Track1_Track1_TrackId",
"Track1"."Track1_Name" AS "Track1_Track1_Name",
"Track1"."Track1_AlbumId" AS "Track1_Track1_AlbumId",
"Track1"."Track1_MediaTypeId" AS "Track1_Track1_MediaTypeId",
"Track1"."Track1_GenreId" AS "Track1_Track1_GenreId",
"Track1"."Track1_Composer" AS "Track1_Track1_Composer",
"Track1"."Track1_Milliseconds" AS "Track1_Track1_Milliseconds",
"Track1"."Track1_Bytes" AS "Track1_Track1_Bytes",
"Track1"."Track1_UnitPrice" AS "Track1_Track1_UnitPrice"
FROM
"Album"
LEFT OUTER JOIN (
SELECT
q1."Track1_TrackId" AS "Track1_TrackId",
q1."Track1_Name" AS "Track1_Name",
q1."Track1_AlbumId" AS "Track1_AlbumId",
q1."Track1_MediaTypeId" AS "Track1_MediaTypeId",
q1."Track1_GenreId" AS "Track1_GenreId",
q1."Track1_Composer" AS "Track1_Composer",
q1."Track1_Milliseconds" AS "Track1_Milliseconds",
q1."Track1_Bytes" AS "Track1_Bytes",
q1."Track1_UnitPrice" AS "Track1_UnitPrice",
q1.row_number AS row_number
FROM
(
SELECT
"Track1"."TrackId" AS "Track1_TrackId",
"Track1"."Name" AS "Track1_Name",
"Track1"."AlbumId" AS "Track1_AlbumId",
"Track1"."MediaTypeId" AS "Track1_MediaTypeId",
"Track1"."GenreId" AS "Track1_GenreId",
"Track1"."Composer" AS "Track1_Composer",
"Track1"."Milliseconds" AS
"Track1_Milliseconds",
"Track1"."Bytes" AS "Track1_Bytes",
"Track1"."UnitPrice" AS "Track1_UnitPrice",
row_number() OVER (
PARTITION BY "Track1"."AlbumId"
ORDER BY "Track1"."Name" ASC
) AS row_number
FROM
"Track" AS "Track1"
WHERE
"Track1"."TrackId" >= ?
) AS q1
WHERE
q1.row_number >= ?
AND
q1.row_number <= ?
) AS "Track1" ON "Album"."AlbumId" = "Track1"."Track1_AlbumId"
ORDER BY
"Album"."AlbumId" ASC
"""
    ).replace(" ", "").replace("\n", "")
    result = str(query).replace(" ", "").replace("\n", "")
    assert expected_query == result
@staticmethod
def test_non_strict_bad_sublimits(db_session):
    """Test bad sublimits don't cause failure when not strict.

    With ``strict=False`` an excessive subresource limit (10000) is
    accepted rather than raising, and still produces pagination SQL.
    """
    query_builder = ModelResourceQueryBuilder()
    query = db_session.query(Customer)
    query = query_builder.apply_subquery_loads(
        query=query,
        resource=CustomerResource(session=db_session),
        subfilters={
            "invoices": SubfilterInfo(
                offset=1,
                limit=10000
            )
        },
        embeds=[],
        strict=False,
        dialect_override=True
    )
    # Expected SQL; whitespace-insensitive comparison below.
    expected_query = (
        """
SELECT
"Customer"."CustomerId" AS "Customer_CustomerId",
"Customer"."FirstName" AS "Customer_FirstName",
"Customer"."LastName" AS "Customer_LastName",
"Customer"."Company" AS "Customer_Company",
"Customer"."Address" AS "Customer_Address",
"Customer"."City" AS "Customer_City",
"Customer"."State" AS "Customer_State",
"Customer"."Country" AS "Customer_Country",
"Customer"."PostalCode" AS "Customer_PostalCode",
"Customer"."Phone" AS "Customer_Phone",
"Customer"."Fax" AS "Customer_Fax",
"Customer"."Email" AS "Customer_Email",
"Customer"."SupportRepId" AS "Customer_SupportRepId",
"Invoice1"."Invoice1_InvoiceId" AS
"Invoice1_Invoice1_InvoiceId",
"Invoice1"."Invoice1_CustomerId" AS
"Invoice1_Invoice1_CustomerId",
"Invoice1"."Invoice1_InvoiceDate" AS
"Invoice1_Invoice1_InvoiceDate",
"Invoice1"."Invoice1_BillingAddress" AS
"Invoice1_Invoice1_BillingAddress",
"Invoice1"."Invoice1_BillingCity" AS
"Invoice1_Invoice1_BillingCity",
"Invoice1"."Invoice1_BillingState" AS
"Invoice1_Invoice1_BillingState",
"Invoice1"."Invoice1_BillingCountry" AS
"Invoice1_Invoice1_BillingCountry",
"Invoice1"."Invoice1_BillingPostalCode" AS
"Invoice1_Invoice1_BillingPostalCode",
"Invoice1"."Invoice1_Total" AS "Invoice1_Invoice1_Total"
FROM
"Customer"
LEFT OUTER JOIN (
SELECT
q1."Invoice1_InvoiceId" AS "Invoice1_InvoiceId",
q1."Invoice1_CustomerId" AS "Invoice1_CustomerId",
q1."Invoice1_InvoiceDate" AS "Invoice1_InvoiceDate",
q1."Invoice1_BillingAddress" AS
"Invoice1_BillingAddress",
q1."Invoice1_BillingCity" AS "Invoice1_BillingCity",
q1."Invoice1_BillingState" AS "Invoice1_BillingState",
q1."Invoice1_BillingCountry" AS
"Invoice1_BillingCountry",
q1."Invoice1_BillingPostalCode" AS
"Invoice1_BillingPostalCode",
q1."Invoice1_Total" AS "Invoice1_Total",
q1.row_number AS row_number
FROM
(
SELECT
"Invoice1"."InvoiceId" AS "Invoice1_InvoiceId",
"Invoice1"."CustomerId" AS
"Invoice1_CustomerId",
"Invoice1"."InvoiceDate" AS
"Invoice1_InvoiceDate",
"Invoice1"."BillingAddress" AS
"Invoice1_BillingAddress",
"Invoice1"."BillingCity" AS
"Invoice1_BillingCity",
"Invoice1"."BillingState" AS
"Invoice1_BillingState",
"Invoice1"."BillingCountry" AS
"Invoice1_BillingCountry",
"Invoice1"."BillingPostalCode" AS
"Invoice1_BillingPostalCode",
"Invoice1"."Total" AS "Invoice1_Total",
row_number() OVER (
PARTITION BY "Invoice1"."CustomerId"
ORDER BY "Invoice1"."InvoiceId" ASC
) AS row_number
FROM
"Invoice" AS "Invoice1"
) AS q1
WHERE
q1.row_number >= ?
AND
q1.row_number <= ?
) AS "Invoice1" ON
"Invoice1"."Invoice1_CustomerId" = "Customer"."CustomerId"
ORDER BY
"Customer"."CustomerId" ASC
"""
    ).replace(" ", "").replace("\n", "")
    result = str(query).replace(" ", "").replace("\n", "")
    assert expected_query == result
@staticmethod
def test_self_ref_composite_id_subquery_with_limit(db_session):
    """Self referential composite id subquery with a limit.

    The ``children`` relation joins through an association table on a
    two-column (composite) key; the pagination window must partition
    on both key columns.
    """
    query_builder = ModelResourceQueryBuilder()
    query = db_session.query(CompositeNode)
    subfilters = {
        "children": SubfilterInfo(
            filters={"node_id": {"$in": [1, 2]}},
            offset=1,
            limit=1
        )
    }
    # dialect_override forces row_number based pagination SQL to be
    # generated even if the test db dialect wouldn't support it.
    query = query_builder.apply_subquery_loads(
        query=query,
        resource=CompositeNodeResource(session=db_session),
        subfilters=subfilters,
        embeds=[],
        dialect_override=True
    )
    # Expected SQL; whitespace-insensitive comparison below.
    expected_query = (
        """
SELECT
"CompositeNode"."NodeId" AS "CompositeNode_NodeId",
"CompositeNode"."CompositeId" AS "CompositeNode_CompositeId",
"CompositeNode1"."CompositeNode1_NodeId" AS
"CompositeNode1_CompositeNode1_NodeId",
"CompositeNode1"."CompositeNode1_CompositeId" AS
"CompositeNode1_CompositeNode1_CompositeId"
FROM
"CompositeNode"
LEFT OUTER JOIN (
SELECT
q1."CompositeNode1_NodeId" AS "CompositeNode1_NodeId",
q1."CompositeNode1_CompositeId" AS
"CompositeNode1_CompositeId",
q1."CompositeNodeToCompositeNode_NodeId" AS
"CompositeNodeToCompositeNode_NodeId",
q1."CompositeNodeToCompositeNode_CompositeId" AS
"CompositeNodeToCompositeNode_CompositeId",
q1."CompositeNodeToCompositeNode_ChildNodeId" AS
"CompositeNodeToCompositeNode_ChildNodeId",
q1."CompositeNodeToCompositeNode_ChildCompositeId" AS
"CompositeNodeToCompositeNode_ChildCompositeId",
q1.row_number AS row_number
FROM
(
SELECT
"CompositeNode1"."NodeId" AS
"CompositeNode1_NodeId",
"CompositeNode1"."CompositeId" AS
"CompositeNode1_CompositeId",
"CompositeNodeToCompositeNode"."NodeId" AS
"CompositeNodeToCompositeNode_NodeId",
"CompositeNodeToCompositeNode"."CompositeId" AS
"CompositeNodeToCompositeNode_CompositeId",
"CompositeNodeToCompositeNode"."ChildNodeId" AS
"CompositeNodeToCompositeNode_ChildNodeId",
"CompositeNodeToCompositeNode".
"ChildCompositeId" AS
"CompositeNodeToCompositeNode_
ChildCompositeId",
row_number() OVER (
PARTITION BY
"CompositeNodeToCompositeNode".
"NodeId",
"CompositeNodeToCompositeNode".
"CompositeId"
ORDER BY
"CompositeNode1"."NodeId" ASC,
"CompositeNode1"."CompositeId" ASC
) AS row_number
FROM
"CompositeNode" AS "CompositeNode1"
JOIN "CompositeNodeToCompositeNode" ON
"CompositeNodeToCompositeNode".
"ChildNodeId" =
"CompositeNode1"."NodeId"
AND
"CompositeNodeToCompositeNode".
"ChildCompositeId" =
"CompositeNode1"."CompositeId"
WHERE
"CompositeNode1"."NodeId" IN (?, ?)
) AS q1
WHERE
q1.row_number >= ?
AND
q1.row_number <= ?
) AS "CompositeNode1" ON
"CompositeNode"."NodeId" =
"CompositeNode1"."CompositeNodeToCompositeNode_NodeId"
AND
"CompositeNode"."CompositeId" =
"CompositeNode1"."CompositeNodeToCompositeNode_CompositeId"
ORDER BY
"CompositeNode"."NodeId" ASC,
"CompositeNode"."CompositeId" ASC
"""
    ).replace(" ", "").replace("\n", "")
    result = str(query).replace(" ", "").replace("\n", "")
    assert expected_query == result
@staticmethod
def test_multilevel_subfilter_limit(db_session):
    """Test subfilter limits and sorts applied at multiple levels.

    A root-level limit plus limits/sorts on both ``tracks`` and the
    nested ``tracks.playlists`` subresource should each get their own
    ``row_number()`` pagination window.
    """
    query_builder = ModelResourceQueryBuilder()
    query = db_session.query(Album)
    subfilters = {
        "tracks": SubfilterInfo(
            filters={"track_id": {"$gte": 5}},
            limit=4,
            sorts=[SortInfo(attr="name", direction="ASC")]
        ),
        "tracks.playlists": SubfilterInfo(
            filters={"playlist_id": {"$gte": 6}},
            limit=5,
            sorts=[SortInfo(attr="name", direction="ASC")]
        )
    }
    # dialect_override forces row_number based pagination SQL to be
    # generated even if the test db dialect wouldn't support it.
    query = query_builder.apply_subquery_loads(
        query=query,
        resource=AlbumResource(session=db_session),
        subfilters=subfilters,
        embeds=[],
        limit=3,
        dialect_override=True
    )
    # Expected SQL; whitespace-insensitive comparison below.
    expected_query = (
        """
SELECT
anon_1."Album_AlbumId" AS "anon_1_Album_AlbumId",
anon_1."Album_Title" AS "anon_1_Album_Title",
anon_1."Album_ArtistId" AS "anon_1_Album_ArtistId",
"Track1"."Track1_TrackId" AS "Track1_Track1_TrackId",
"Track1"."Track1_Name" AS "Track1_Track1_Name",
"Track1"."Track1_AlbumId" AS "Track1_Track1_AlbumId",
"Track1"."Track1_MediaTypeId" AS "Track1_Track1_MediaTypeId",
"Track1"."Track1_GenreId" AS "Track1_Track1_GenreId",
"Track1"."Track1_Composer" AS "Track1_Track1_Composer",
"Track1"."Track1_Milliseconds" AS "Track1_Track1_Milliseconds",
"Track1"."Track1_Bytes" AS "Track1_Track1_Bytes",
"Track1"."Track1_UnitPrice" AS "Track1_Track1_UnitPrice",
"Playlist1"."Playlist1_PlaylistId" AS
"Playlist1_Playlist1_PlaylistId",
"Playlist1"."Playlist1_Name" AS "Playlist1_Playlist1_Name"
FROM
(
SELECT
"Album"."AlbumId" AS "Album_AlbumId",
"Album"."Title" AS "Album_Title",
"Album"."ArtistId" AS "Album_ArtistId",
row_number() OVER (
ORDER BY
"Album"."AlbumId" ASC
) AS row_number
FROM
"Album"
) AS anon_1
LEFT OUTER JOIN (
SELECT
q1."Track1_TrackId" AS "Track1_TrackId",
q1."Track1_Name" AS "Track1_Name",
q1."Track1_AlbumId" AS "Track1_AlbumId",
q1."Track1_MediaTypeId" AS "Track1_MediaTypeId",
q1."Track1_GenreId" AS "Track1_GenreId",
q1."Track1_Composer" AS "Track1_Composer",
q1."Track1_Milliseconds" AS "Track1_Milliseconds",
q1."Track1_Bytes" AS "Track1_Bytes",
q1."Track1_UnitPrice" AS "Track1_UnitPrice",
q1.row_number AS row_number
FROM
(
SELECT
"Track1"."TrackId" AS "Track1_TrackId",
"Track1"."Name" AS "Track1_Name",
"Track1"."AlbumId" AS "Track1_AlbumId",
"Track1"."MediaTypeId" AS "Track1_MediaTypeId",
"Track1"."GenreId" AS "Track1_GenreId",
"Track1"."Composer" AS "Track1_Composer",
"Track1"."Milliseconds" AS
"Track1_Milliseconds",
"Track1"."Bytes" AS "Track1_Bytes",
"Track1"."UnitPrice" AS "Track1_UnitPrice",
row_number() OVER (
PARTITION BY "Track1"."AlbumId"
ORDER BY
"Track1"."Name" ASC
) AS row_number
FROM
"Track" AS "Track1"
WHERE
"Track1"."TrackId" >= ?
) AS q1
WHERE
q1.row_number >= ?
AND q1.row_number <= ?
) AS "Track1" ON
anon_1."Album_AlbumId" = "Track1"."Track1_AlbumId"
LEFT OUTER JOIN (
SELECT
q1."Playlist1_PlaylistId" AS "Playlist1_PlaylistId",
q1."Playlist1_Name" AS "Playlist1_Name",
q1."PlaylistTrack_PlaylistId" AS
"PlaylistTrack_PlaylistId",
q1."PlaylistTrack_TrackId" AS "PlaylistTrack_TrackId",
q1.row_number AS row_number
FROM
(
SELECT
"Playlist1"."PlaylistId" AS
"Playlist1_PlaylistId",
"Playlist1"."Name" AS "Playlist1_Name",
"PlaylistTrack"."PlaylistId" AS
"PlaylistTrack_PlaylistId",
"PlaylistTrack"."TrackId" AS
"PlaylistTrack_TrackId",
row_number() OVER (
PARTITION BY "PlaylistTrack"."TrackId"
ORDER BY
"Playlist1"."Name" ASC
) AS row_number
FROM
"Playlist" AS "Playlist1"
JOIN "PlaylistTrack" ON
"PlaylistTrack"."PlaylistId" =
"Playlist1"."PlaylistId"
WHERE
"Playlist1"."PlaylistId" >= ?
) AS q1
WHERE
q1.row_number >= ?
AND q1.row_number <= ?
) AS "Playlist1" ON
"Playlist1"."PlaylistTrack_TrackId" =
"Track1"."Track1_TrackId"
WHERE
anon_1.row_number >= ?
AND
anon_1.row_number <= ?
ORDER BY
anon_1.row_number
"""
    ).replace(" ", "").replace("\n", "")
    result = str(query).replace(" ", "").replace("\n", "")
    assert expected_query == result
@staticmethod
def test_many_to_many_subresource_limit(db_session):
    """Many to many relationships with limits loaded properly.

    Both directions of the Playlist<->Track many-to-many (through
    ``PlaylistTrack``) get their own pagination window, partitioned
    on the association table's key column.
    """
    query_builder = ModelResourceQueryBuilder()
    query = db_session.query(Playlist)
    subfilters = {
        "tracks": SubfilterInfo(
            filters={"track_id": {"$gte": 5}},
            limit=5,
            sorts=[SortInfo(attr="name", direction="ASC")]
        ),
        "tracks.playlists": SubfilterInfo(
            filters={"playlist_id": {"$lte": 6}},
            limit=4,
            sorts=[SortInfo(attr="name", direction="ASC")]
        )
    }
    # dialect_override forces row_number based pagination SQL to be
    # generated even if the test db dialect wouldn't support it.
    query = query_builder.apply_subquery_loads(
        query=query,
        resource=PlaylistResource(session=db_session),
        subfilters=subfilters,
        embeds=[],
        limit=3,
        dialect_override=True
    )
    # Expected SQL; whitespace-insensitive comparison below.
    expected_query = (
        """
SELECT
anon_1."Playlist_PlaylistId" AS "anon_1_Playlist_PlaylistId",
anon_1."Playlist_Name" AS "anon_1_Playlist_Name",
"Track1"."Track1_TrackId" AS "Track1_Track1_TrackId",
"Track1"."Track1_Name" AS "Track1_Track1_Name",
"Track1"."Track1_AlbumId" AS "Track1_Track1_AlbumId",
"Track1"."Track1_MediaTypeId" AS "Track1_Track1_MediaTypeId",
"Track1"."Track1_GenreId" AS "Track1_Track1_GenreId",
"Track1"."Track1_Composer" AS "Track1_Track1_Composer",
"Track1"."Track1_Milliseconds" AS "Track1_Track1_Milliseconds",
"Track1"."Track1_Bytes" AS "Track1_Track1_Bytes",
"Track1"."Track1_UnitPrice" AS "Track1_Track1_UnitPrice",
"Playlist1"."Playlist1_PlaylistId" AS
"Playlist1_Playlist1_PlaylistId",
"Playlist1"."Playlist1_Name" AS "Playlist1_Playlist1_Name"
FROM
(
SELECT
"Playlist"."PlaylistId" AS "Playlist_PlaylistId",
"Playlist"."Name" AS "Playlist_Name",
row_number() OVER (
ORDER BY
"Playlist"."PlaylistId" ASC
) AS row_number
FROM
"Playlist"
) AS anon_1
LEFT OUTER JOIN (
SELECT
q1."Track1_TrackId" AS "Track1_TrackId",
q1."Track1_Name" AS "Track1_Name",
q1."Track1_AlbumId" AS "Track1_AlbumId",
q1."Track1_MediaTypeId" AS "Track1_MediaTypeId",
q1."Track1_GenreId" AS "Track1_GenreId",
q1."Track1_Composer" AS "Track1_Composer",
q1."Track1_Milliseconds" AS "Track1_Milliseconds",
q1."Track1_Bytes" AS "Track1_Bytes",
q1."Track1_UnitPrice" AS "Track1_UnitPrice",
q1."PlaylistTrack_PlaylistId" AS
"PlaylistTrack_PlaylistId",
q1."PlaylistTrack_TrackId" AS "PlaylistTrack_TrackId",
q1.row_number AS row_number
FROM
(
SELECT
"Track1"."TrackId" AS "Track1_TrackId",
"Track1"."Name" AS "Track1_Name",
"Track1"."AlbumId" AS "Track1_AlbumId",
"Track1"."MediaTypeId" AS "Track1_MediaTypeId",
"Track1"."GenreId" AS "Track1_GenreId",
"Track1"."Composer" AS "Track1_Composer",
"Track1"."Milliseconds" AS
"Track1_Milliseconds",
"Track1"."Bytes" AS "Track1_Bytes",
"Track1"."UnitPrice" AS "Track1_UnitPrice",
"PlaylistTrack"."PlaylistId" AS
"PlaylistTrack_PlaylistId",
"PlaylistTrack"."TrackId" AS
"PlaylistTrack_TrackId",
row_number() OVER (
PARTITION BY
"PlaylistTrack"."PlaylistId"
ORDER BY
"Track1"."Name" ASC
) AS row_number
FROM
"Track" AS "Track1"
JOIN
"PlaylistTrack" ON
"PlaylistTrack"."TrackId" =
"Track1"."TrackId"
WHERE
"Track1"."TrackId" >= ?
) AS q1
WHERE
q1.row_number >= ?
AND
q1.row_number <= ?
) AS "Track1" ON
anon_1."Playlist_PlaylistId" =
"Track1"."PlaylistTrack_PlaylistId"
LEFT OUTER JOIN (
SELECT
q1."Playlist1_PlaylistId" AS "Playlist1_PlaylistId",
q1."Playlist1_Name" AS "Playlist1_Name",
q1."PlaylistTrack_PlaylistId" AS
"PlaylistTrack_PlaylistId",
q1."PlaylistTrack_TrackId" AS "PlaylistTrack_TrackId",
q1.row_number AS row_number
FROM
(
SELECT
"Playlist1"."PlaylistId" AS
"Playlist1_PlaylistId",
"Playlist1"."Name" AS "Playlist1_Name",
"PlaylistTrack"."PlaylistId" AS
"PlaylistTrack_PlaylistId",
"PlaylistTrack"."TrackId" AS
"PlaylistTrack_TrackId",
row_number() OVER (
PARTITION BY
"PlaylistTrack"."TrackId"
ORDER BY
"Playlist1"."Name" ASC
) AS row_number
FROM
"Playlist" AS "Playlist1"
JOIN
"PlaylistTrack" ON
"PlaylistTrack"."PlaylistId" =
"Playlist1"."PlaylistId"
WHERE
"Playlist1"."PlaylistId" <= ?
) AS q1
WHERE
q1.row_number >= ?
AND
q1.row_number <= ?
) AS "Playlist1" ON
"Playlist1"."PlaylistTrack_TrackId" =
"Track1"."Track1_TrackId"
WHERE
anon_1.row_number >= ?
AND
anon_1.row_number <= ?
ORDER BY
anon_1.row_number
"""
    ).replace(" ", "").replace("\n", "")
    result = str(query).replace(" ", "").replace("\n", "")
    assert expected_query == result
@staticmethod
def test_subresource_bad_dialect_fail(db_session):
    """Test a sublimit/offset fails with unsupported dialect.

    Without ``dialect_override``, requesting subresource pagination
    on a dialect lacking window-function support must raise a
    ``BadRequestError`` with the expected error code.
    """
    query_builder = ModelResourceQueryBuilder()
    query = db_session.query(Album)
    track_limits = SubfilterInfo(offset=1, limit=10)
    with raises(BadRequestError) as exc_info:
        query_builder.apply_subquery_loads(
            query=query,
            resource=AlbumResource(session=db_session),
            subfilters={"tracks": track_limits},
            embeds=[],
            dialect_override=False
        )
    assert exc_info.value.code == "invalid_subresource_options"
@staticmethod
def test_composite_root_limit_with_subquery_limit(db_session):
    """Apply limit to both root + subresource with composite id.

    Root pagination windows over both composite key columns while
    the ``many`` subresource window partitions on them.
    """
    query_builder = ModelResourceQueryBuilder()
    query = db_session.query(CompositeOne)
    subfilters = {
        "many": SubfilterInfo(
            filters={"many_id": 1},
            offset=1,
            limit=1
        )
    }
    # dialect_override forces row_number based pagination SQL to be
    # generated even if the test db dialect wouldn't support it.
    query = query_builder.apply_subquery_loads(
        query=query,
        resource=CompositeOneResource(session=db_session),
        subfilters=subfilters,
        limit=1,
        offset=1,
        embeds=[],
        dialect_override=True
    )
    # Expected SQL; whitespace-insensitive comparison below.
    expected_query = (
        """
SELECT
anon_1."CompositeOne_OneId" AS "anon_1_CompositeOne_OneId",
anon_1."CompositeOne_CompositeOneId" AS
"anon_1_CompositeOne_CompositeOneId",
"CompositeMany1"."CompositeMany1_ManyId" AS
"CompositeMany1_CompositeMany1_ManyId",
"CompositeMany1"."CompositeMany1_OneId" AS
"CompositeMany1_CompositeMany1_OneId",
"CompositeMany1"."CompositeMany1_CompositeOneId" AS
"CompositeMany1_CompositeMany1_CompositeOneId"
FROM
(
SELECT
"CompositeOne"."OneId" AS "CompositeOne_OneId",
"CompositeOne"."CompositeOneId" AS
"CompositeOne_CompositeOneId",
row_number() OVER (
ORDER BY
"CompositeOne"."OneId" ASC,
"CompositeOne"."CompositeOneId" ASC
) AS row_number
FROM
"CompositeOne"
) AS anon_1
LEFT OUTER JOIN (
SELECT
q1."CompositeMany1_ManyId" AS "CompositeMany1_ManyId",
q1."CompositeMany1_OneId" AS "CompositeMany1_OneId",
q1."CompositeMany1_CompositeOneId" AS
"CompositeMany1_CompositeOneId",
q1.row_number AS row_number
FROM
(
SELECT
"CompositeMany1"."ManyId" AS
"CompositeMany1_ManyId",
"CompositeMany1"."OneId" AS
"CompositeMany1_OneId",
"CompositeMany1"."CompositeOneId" AS
"CompositeMany1_CompositeOneId",
row_number() OVER (
PARTITION BY
"CompositeMany1"."OneId",
"CompositeMany1"."CompositeOneId"
ORDER BY
"CompositeMany1"."ManyId" ASC
) AS row_number
FROM
"CompositeMany" AS "CompositeMany1"
WHERE
"CompositeMany1"."ManyId" = ?
) AS q1
WHERE
q1.row_number >= ?
AND q1.row_number <= ?
) AS "CompositeMany1" ON
anon_1."CompositeOne_OneId" =
"CompositeMany1"."CompositeMany1_OneId"
AND
anon_1."CompositeOne_CompositeOneId" =
"CompositeMany1"."CompositeMany1_CompositeOneId"
WHERE
anon_1.row_number >= ?
AND
anon_1.row_number <= ?
ORDER BY
anon_1.row_number
"""
    ).replace(" ", "").replace("\n", "")
    result = str(query).replace(" ", "").replace("\n", "")
    assert expected_query == result
@staticmethod
def test_composite_id_subquery_one_to_many(db_session):
    """Test a composite id subquery with a one to many relation.

    The ``many`` subresource window partitions on both columns of
    the parent's composite key.
    """
    query_builder = ModelResourceQueryBuilder()
    query = db_session.query(CompositeOne)
    subfilters = {
        "many": SubfilterInfo(
            filters={"many_id": 1},
            offset=1,
            limit=1
        )
    }
    # dialect_override forces row_number based pagination SQL to be
    # generated even if the test db dialect wouldn't support it.
    query = query_builder.apply_subquery_loads(
        query=query,
        resource=CompositeOneResource(session=db_session),
        subfilters=subfilters,
        embeds=[],
        dialect_override=True
    )
    # Expected SQL; whitespace-insensitive comparison below.
    expected_query = (
        """
SELECT
"CompositeOne"."OneId" AS "CompositeOne_OneId",
"CompositeOne"."CompositeOneId" AS
"CompositeOne_CompositeOneId",
"CompositeMany1"."CompositeMany1_ManyId" AS
"CompositeMany1_CompositeMany1_ManyId",
"CompositeMany1"."CompositeMany1_OneId" AS
"CompositeMany1_CompositeMany1_OneId",
"CompositeMany1"."CompositeMany1_CompositeOneId" AS
"CompositeMany1_CompositeMany1_CompositeOneId"
FROM
"CompositeOne"
LEFT OUTER JOIN (
SELECT
q1."CompositeMany1_ManyId" AS "CompositeMany1_ManyId",
q1."CompositeMany1_OneId" AS "CompositeMany1_OneId",
q1."CompositeMany1_CompositeOneId" AS
"CompositeMany1_CompositeOneId",
q1.row_number AS row_number
FROM
(
SELECT
"CompositeMany1"."ManyId" AS
"CompositeMany1_ManyId",
"CompositeMany1"."OneId" AS
"CompositeMany1_OneId",
"CompositeMany1"."CompositeOneId" AS
"CompositeMany1_CompositeOneId",
row_number() OVER (
PARTITION BY
"CompositeMany1"."OneId",
"CompositeMany1"."CompositeOneId"
ORDER BY
"CompositeMany1"."ManyId" ASC
) AS row_number
FROM
"CompositeMany" AS "CompositeMany1"
WHERE
"CompositeMany1"."ManyId" = ?
) AS q1
WHERE
q1.row_number >= ?
AND q1.row_number <= ?
) AS "CompositeMany1" ON
"CompositeOne"."OneId" =
"CompositeMany1"."CompositeMany1_OneId"
AND
"CompositeOne"."CompositeOneId" =
"CompositeMany1"."CompositeMany1_CompositeOneId"
ORDER BY
"CompositeOne"."OneId" ASC,
"CompositeOne"."CompositeOneId" ASC
"""
    ).replace(" ", "").replace("\n", "")
    result = str(query).replace(" ", "").replace("\n", "")
    assert expected_query == result
@staticmethod
def test_self_ref_one_to_many_limit(db_session):
    """Self referential one to many subquery with a limit.

    ``subordinates`` is the Employee table joined to itself on
    ``ReportsTo``; the pagination window partitions on that column.
    """
    query_builder = ModelResourceQueryBuilder()
    query = db_session.query(Employee)
    subfilters = {
        "subordinates": SubfilterInfo(
            filters={"employee_id": {"$nin": [1, 2]}},
            limit=1
        )
    }
    # dialect_override forces row_number based pagination SQL to be
    # generated even if the test db dialect wouldn't support it.
    query = query_builder.apply_subquery_loads(
        query=query,
        resource=EmployeeResource(session=db_session),
        subfilters=subfilters,
        embeds=[],
        dialect_override=True
    )
    # Expected SQL; whitespace-insensitive comparison below.
    expected_query = (
        """
SELECT
"Employee"."EmployeeId" AS "Employee_EmployeeId",
"Employee"."LastName" AS "Employee_LastName",
"Employee"."FirstName" AS "Employee_FirstName",
"Employee"."Title" AS "Employee_Title",
"Employee"."ReportsTo" AS "Employee_ReportsTo",
"Employee"."BirthDate" AS "Employee_BirthDate",
"Employee"."HireDate" AS "Employee_HireDate",
"Employee"."Address" AS "Employee_Address",
"Employee"."City" AS "Employee_City",
"Employee"."State" AS "Employee_State",
"Employee"."Country" AS "Employee_Country",
"Employee"."PostalCode" AS "Employee_PostalCode",
"Employee"."Phone" AS "Employee_Phone",
"Employee"."Fax" AS "Employee_Fax",
"Employee"."Email" AS "Employee_Email",
"Employee1"."Employee1_EmployeeId" AS
"Employee1_Employee1_EmployeeId",
"Employee1"."Employee1_LastName" AS
"Employee1_Employee1_LastName",
"Employee1"."Employee1_FirstName" AS
"Employee1_Employee1_FirstName",
"Employee1"."Employee1_Title" AS "Employee1_Employee1_Title",
"Employee1"."Employee1_ReportsTo" AS
"Employee1_Employee1_ReportsTo",
"Employee1"."Employee1_BirthDate" AS
"Employee1_Employee1_BirthDate",
"Employee1"."Employee1_HireDate" AS
"Employee1_Employee1_HireDate",
"Employee1"."Employee1_Address" AS
"Employee1_Employee1_Address",
"Employee1"."Employee1_City" AS "Employee1_Employee1_City",
"Employee1"."Employee1_State" AS "Employee1_Employee1_State",
"Employee1"."Employee1_Country" AS
"Employee1_Employee1_Country",
"Employee1"."Employee1_PostalCode" AS
"Employee1_Employee1_PostalCode",
"Employee1"."Employee1_Phone" AS "Employee1_Employee1_Phone",
"Employee1"."Employee1_Fax" AS "Employee1_Employee1_Fax",
"Employee1"."Employee1_Email" AS "Employee1_Employee1_Email"
FROM
"Employee"
LEFT OUTER JOIN (
SELECT
q1."Employee1_EmployeeId" AS "Employee1_EmployeeId",
q1."Employee1_LastName" AS "Employee1_LastName",
q1."Employee1_FirstName" AS "Employee1_FirstName",
q1."Employee1_Title" AS "Employee1_Title",
q1."Employee1_ReportsTo" AS "Employee1_ReportsTo",
q1."Employee1_BirthDate" AS "Employee1_BirthDate",
q1."Employee1_HireDate" AS "Employee1_HireDate",
q1."Employee1_Address" AS "Employee1_Address",
q1."Employee1_City" AS "Employee1_City",
q1."Employee1_State" AS "Employee1_State",
q1."Employee1_Country" AS "Employee1_Country",
q1."Employee1_PostalCode" AS "Employee1_PostalCode",
q1."Employee1_Phone" AS "Employee1_Phone",
q1."Employee1_Fax" AS "Employee1_Fax",
q1."Employee1_Email" AS "Employee1_Email",
q1.row_number AS row_number
FROM
(
SELECT
"Employee1"."EmployeeId" AS
"Employee1_EmployeeId",
"Employee1"."LastName" AS "Employee1_LastName",
"Employee1"."FirstName" AS
"Employee1_FirstName",
"Employee1"."Title" AS "Employee1_Title",
"Employee1"."ReportsTo" AS
"Employee1_ReportsTo",
"Employee1"."BirthDate" AS
"Employee1_BirthDate",
"Employee1"."HireDate" AS "Employee1_HireDate",
"Employee1"."Address" AS "Employee1_Address",
"Employee1"."City" AS "Employee1_City",
"Employee1"."State" AS "Employee1_State",
"Employee1"."Country" AS "Employee1_Country",
"Employee1"."PostalCode" AS
"Employee1_PostalCode",
"Employee1"."Phone" AS "Employee1_Phone",
"Employee1"."Fax" AS "Employee1_Fax",
"Employee1"."Email" AS "Employee1_Email",
row_number() OVER (
PARTITION BY "Employee1"."ReportsTo"
ORDER BY
"Employee1"."EmployeeId" ASC
) AS row_number
FROM
"Employee" AS "Employee1"
WHERE
"Employee1"."EmployeeId" NOT IN (?, ?)
) AS q1
WHERE
q1.row_number >= ?
AND q1.row_number <= ?
) AS "Employee1" ON
"Employee"."EmployeeId" = "Employee1"."Employee1_ReportsTo"
ORDER BY
"Employee"."EmployeeId" ASC
"""
    ).replace(" ", "").replace("\n", "")
    result = str(query).replace(" ", "").replace("\n", "")
    assert expected_query == result
@staticmethod
def test_bad_subfilter_ignore_with_limit(db_session):
    """Bad subfilter using a limit gets ignored when not strict.

    The ``$bad`` operator is invalid; with ``strict=False`` the
    filter is dropped but the limit-driven pagination window remains
    (note the inner query has no WHERE clause).
    """
    query_builder = ModelResourceQueryBuilder()
    query = db_session.query(Album)
    subfilters = {
        "tracks": SubfilterInfo(
            filters={"track_id": {"$bad": 5}},
            limit=1
        )
    }
    # dialect_override forces row_number based pagination SQL to be
    # generated even if the test db dialect wouldn't support it.
    query = query_builder.apply_subquery_loads(
        query=query,
        resource=AlbumResource(session=db_session),
        subfilters=subfilters,
        embeds=[],
        dialect_override=True,
        strict=False
    )
    # Expected SQL; whitespace-insensitive comparison below.
    expected_query = (
        """
SELECT
"Album"."AlbumId" AS "Album_AlbumId",
"Album"."Title" AS "Album_Title",
"Album"."ArtistId" AS "Album_ArtistId",
"Track1"."Track1_TrackId" AS "Track1_Track1_TrackId",
"Track1"."Track1_Name" AS "Track1_Track1_Name",
"Track1"."Track1_AlbumId" AS "Track1_Track1_AlbumId",
"Track1"."Track1_MediaTypeId" AS "Track1_Track1_MediaTypeId",
"Track1"."Track1_GenreId" AS "Track1_Track1_GenreId",
"Track1"."Track1_Composer" AS "Track1_Track1_Composer",
"Track1"."Track1_Milliseconds" AS "Track1_Track1_Milliseconds",
"Track1"."Track1_Bytes" AS "Track1_Track1_Bytes",
"Track1"."Track1_UnitPrice" AS "Track1_Track1_UnitPrice"
FROM
"Album"
LEFT OUTER JOIN (
SELECT
q1."Track1_TrackId" AS "Track1_TrackId",
q1."Track1_Name" AS "Track1_Name",
q1."Track1_AlbumId" AS "Track1_AlbumId",
q1."Track1_MediaTypeId" AS "Track1_MediaTypeId",
q1."Track1_GenreId" AS "Track1_GenreId",
q1."Track1_Composer" AS "Track1_Composer",
q1."Track1_Milliseconds" AS "Track1_Milliseconds",
q1."Track1_Bytes" AS "Track1_Bytes",
q1."Track1_UnitPrice" AS "Track1_UnitPrice",
q1.row_number AS row_number
FROM
(
SELECT
"Track1"."TrackId" AS "Track1_TrackId",
"Track1"."Name" AS "Track1_Name",
"Track1"."AlbumId" AS "Track1_AlbumId",
"Track1"."MediaTypeId" AS "Track1_MediaTypeId",
"Track1"."GenreId" AS "Track1_GenreId",
"Track1"."Composer" AS "Track1_Composer",
"Track1"."Milliseconds" AS
"Track1_Milliseconds",
"Track1"."Bytes" AS "Track1_Bytes",
"Track1"."UnitPrice" AS "Track1_UnitPrice",
row_number() OVER (
PARTITION BY "Track1"."AlbumId"
ORDER BY
"Track1"."TrackId" ASC
) AS row_number
FROM
"Track" AS "Track1"
) AS q1
WHERE
q1.row_number >= ?
AND q1.row_number <= ?
) AS "Track1" ON "Album"."AlbumId" = "Track1"."Track1_AlbumId"
ORDER BY
"Album"."AlbumId" ASC
"""
    ).replace(" ", "").replace("\n", "")
    result = str(query).replace(" ", "").replace("\n", "")
    assert expected_query == result
class TestDrowsyQueryBuilderRowNumSupport(DrowsyDatabaseTests):
"""Query builder tests for dialects supporting row_number."""
backends = ['mssql', 'postgres']
@staticmethod
def test_root_and_nested_limit_offset(db_session):
    """Test offset and limit in both root and nested collections.

    The root Album collection is paginated (limit=10, offset=1) and
    the tracks subresource is filtered to track_id >= 15 and
    paginated per album (limit=1, offset=1).
    """
    query_builder = ModelResourceQueryBuilder()
    query = db_session.query(Album)
    subfilters = {
        "tracks": SubfilterInfo(
            filters={"track_id": {"$gte": 15}},
            offset=1,
            limit=1
        )
    }
    query = query_builder.apply_subquery_loads(
        query=query,
        resource=AlbumResource(session=db_session),
        subfilters=subfilters,
        embeds=[],
        limit=10,
        offset=1
    )
    results = query.all()
    assert len(results) == 10
    for album in results:
        # Per-album subresource limit.
        assert len(album.tracks) <= 1
        for track in album.tracks:
            # root offset check
            assert album.album_id != 1
            if album.album_id == 2:
                # subresource offset check
                assert track.track_id == 16
            # Subfilter applied was track_id >= 15, so every loaded
            # track must satisfy it (the original test only checked
            # >= 5, a leftover from sibling tests using that filter).
            assert track.track_id >= 15
@staticmethod
def test_simple_subfilter_limit_offset(db_session):
    """Test offset and limit in a subresource.

    Tracks are filtered to track_id >= 5 and paginated per album
    with offset=1, limit=1; verifies the loaded collections.
    """
    builder = ModelResourceQueryBuilder()
    album_query = builder.apply_subquery_loads(
        query=db_session.query(Album),
        resource=AlbumResource(session=db_session),
        subfilters={
            "tracks": SubfilterInfo(
                filters={"track_id": {"$gte": 5}},
                offset=1,
                limit=1
            )
        },
        embeds=[]
    )
    albums = album_query.all()
    # offset test
    assert albums[0].album_id == 1
    assert albums[0].tracks[0].track_id == 7
    for album in albums:
        # At most one track per album thanks to the sublimit.
        assert len(album.tracks) <= 1
        assert all(track.track_id >= 5 for track in album.tracks)
@staticmethod
def test_subfilter_limit_offset_sorts(db_session):
    """Test subfiltering with sorts works with limit and offset.

    Sorting tracks by name with offset=1, limit=1 should load the
    second track alphabetically for each album.
    """
    builder = ModelResourceQueryBuilder()
    track_info = SubfilterInfo(
        filters={"track_id": {"$gte": 5}},
        offset=1,
        limit=1,
        sorts=[SortInfo(attr="name", direction="ASC")]
    )
    album_query = builder.apply_subquery_loads(
        query=db_session.query(Album),
        resource=AlbumResource(session=db_session),
        subfilters={"tracks": track_info},
        embeds=[]
    )
    albums = album_query.all()
    assert len(albums) == 347
    for album in albums:
        # limit test
        assert len(album.tracks) <= 1
        if album.album_id == 1:
            # offset test
            assert album.tracks[0].name == "C.O.D."
    @staticmethod
    def test_non_strict_bad_sublimits(db_session):
        """Test bad sublimits don't cause failure when not strict.

        With ``strict=False`` an out-of-range sublimit (10000) is tolerated
        rather than raising.
        """
        query_builder = ModelResourceQueryBuilder()
        query = db_session.query(Customer)
        query = query_builder.apply_subquery_loads(
            query=query,
            resource=CustomerResource(session=db_session),
            subfilters={
                "invoices": SubfilterInfo(
                    offset=1,
                    limit=10000
                )
            },
            embeds=[],
            strict=False
        )
        results = query.all()
        assert len(results) == 59
        assert results[0].customer_id == 1
        # offset check
        assert len(results[0].invoices) == 6
        assert results[0].invoices[0].invoice_id == 121
    @staticmethod
    def test_self_ref_composite_id_subquery_with_limit(db_session):
        """Self referential a composite id subquery with a limit"""
        query_builder = ModelResourceQueryBuilder()
        # Restrict the root to a single node; children are a self
        # referential relation on a composite primary key.
        query = db_session.query(CompositeNode).filter(CompositeNode.node_id == 1)
        subfilters = {
            "children": SubfilterInfo(
                filters={"node_id": {"$in": [1, 2]}},
                offset=1,
                limit=1
            )
        }
        query = query_builder.apply_subquery_loads(
            query=query,
            resource=CompositeNodeResource(session=db_session),
            subfilters=subfilters,
            embeds=[]
        )
        results = query.all()
        assert len(results) == 1
        assert results[0].node_id == 1
        # offset=1 skips the first matching child, leaving node 2.
        assert results[0].children[0].node_id == 2
    @staticmethod
    def test_multilevel_subfilter_limit(db_session):
        """Test that limits apply independently at multiple nesting levels.

        Both ``tracks`` and the nested ``tracks.playlists`` collections get
        their own filter, sort, and limit, on top of a root-level limit.
        """
        query_builder = ModelResourceQueryBuilder()
        query = db_session.query(Album)
        subfilters = {
            "tracks": SubfilterInfo(
                filters={"track_id": {"$gte": 5}},
                limit=4,
                sorts=[SortInfo(attr="name", direction="ASC")]
            ),
            "tracks.playlists": SubfilterInfo(
                filters={"playlist_id": {"$gte": 6}},
                limit=5,
                sorts=[SortInfo(attr="name", direction="ASC")]
            )
        }
        query = query_builder.apply_subquery_loads(
            query=query,
            resource=AlbumResource(session=db_session),
            subfilters=subfilters,
            embeds=[],
            limit=3
        )
        results = query.all()
        assert len(results) == 3
        for album in results:
            # limit test
            assert len(album.tracks) <= 4
            for track in album.tracks:
                assert track.track_id >= 5
                # limit test
                assert len(track.playlists) <= 5
                for playlist in track.playlists:
                    assert playlist.playlist_id >= 6
    @staticmethod
    def test_many_to_many_subresource_limit(db_session):
        """Many to many relationships with limits loaded properly."""
        query_builder = ModelResourceQueryBuilder()
        query = db_session.query(Playlist)
        # Playlist <-> Track is many-to-many; the second entry filters the
        # back-reference from tracks to playlists.
        subfilters = {
            "tracks": SubfilterInfo(
                filters={"track_id": {"$gte": 5}},
                limit=5,
                sorts=[SortInfo(attr="name", direction="ASC")]
            ),
            "tracks.playlists": SubfilterInfo(
                filters={"playlist_id": {"$lte": 6}},
                limit=4,
                sorts=[SortInfo(attr="name", direction="ASC")]
            )
        }
        query = query_builder.apply_subquery_loads(
            query=query,
            resource=PlaylistResource(session=db_session),
            subfilters=subfilters,
            embeds=[],
            limit=3
        )
        results = query.all()
        assert len(results) == 3
        for playlist in results:
            assert len(playlist.tracks) <= 5
            for track in playlist.tracks:
                assert track.track_id >= 5
                assert len(track.playlists) <= 4
                for pl in track.playlists:
                    assert pl.playlist_id <= 6
    @staticmethod
    def test_self_ref_one_to_many_limit(db_session):
        """Self referential one to many subquery with a limit"""
        query_builder = ModelResourceQueryBuilder()
        query = db_session.query(Employee)
        # subordinates is a self referential one-to-many relation.
        subfilters = {
            "subordinates": SubfilterInfo(
                filters={"employee_id": {"$nin": [1, 2]}},
                limit=1
            )
        }
        query = query_builder.apply_subquery_loads(
            query=query,
            resource=EmployeeResource(session=db_session),
            subfilters=subfilters,
            embeds=[]
        )
        results = query.all()
        assert len(results) == 8
        for manager in results:
            assert len(manager.subordinates) <= 1
            for subordinate in manager.subordinates:
                assert subordinate.employee_id not in [1, 2]
    @staticmethod
    def test_bad_subfilter_ignore_with_limit(db_session):
        """Bad subfilter using a limit gets ignored when not strict.

        The unknown ``$bad`` operator must not raise with ``strict=False``;
        the limit is still honored.
        """
        query_builder = ModelResourceQueryBuilder()
        query = db_session.query(Album)
        subfilters = {
            "tracks": SubfilterInfo(
                filters={"track_id": {"$bad": 5}},
                limit=1
            )
        }
        query = query_builder.apply_subquery_loads(
            query=query,
            resource=AlbumResource(session=db_session),
            subfilters=subfilters,
            embeds=[],
            strict=False
        )
        results = query.all()
        assert len(results) > 0
        for album in results:
            assert len(album.tracks) <= 1
| 42.284549 | 82 | 0.485255 | 7,278 | 93,322 | 5.998076 | 0.043144 | 0.031887 | 0.018922 | 0.025244 | 0.83344 | 0.746026 | 0.724012 | 0.717208 | 0.704975 | 0.687818 | 0 | 0.023525 | 0.427895 | 93,322 | 2,206 | 83 | 42.303717 | 0.794119 | 0.03848 | 0 | 0.73961 | 0 | 0 | 0.038489 | 0.006509 | 0 | 0 | 0 | 0.000453 | 0.083969 | 1 | 0.049194 | false | 0 | 0.007634 | 0 | 0.061069 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
753cfaf769eed9a30a20814afde31603ab7ae99d | 108 | py | Python | timm/data/sdata/__init__.py | ddp5730/Non-binary-deep-transfer-learning-for-image-classification | affb243b0939f82bf364ed9c4c203e203760082c | [
"BSD-Source-Code"
] | 5 | 2021-09-06T07:08:14.000Z | 2021-12-16T08:00:50.000Z | timm/data/sdata/__init__.py | ddp5730/Non-binary-deep-transfer-learning-for-image-classification | affb243b0939f82bf364ed9c4c203e203760082c | [
"BSD-Source-Code"
] | 1 | 2021-10-05T12:52:31.000Z | 2021-10-05T12:52:31.000Z | timm/data/sdata/__init__.py | ddp5730/Non-binary-deep-transfer-learning-for-image-classification | affb243b0939f82bf364ed9c4c203e203760082c | [
"BSD-Source-Code"
] | 1 | 2021-10-05T12:15:11.000Z | 2021-10-05T12:15:11.000Z | from .aircraft import *
from .dtd import *
from .air_dtd import *
from .caltech import *
from .cars import * | 21.6 | 23 | 0.731481 | 16 | 108 | 4.875 | 0.4375 | 0.512821 | 0.333333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.175926 | 108 | 5 | 24 | 21.6 | 0.876404 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
f33c41074c42ffcc8a95885437b6c3f37c843c67 | 270 | py | Python | dfs/datasheets/datatabs/__init__.py | shoeberto/dfs-data-tooling | fe1be744b343f18d845ddc20f2303d1dbf0d90c5 | [
"MIT"
] | null | null | null | dfs/datasheets/datatabs/__init__.py | shoeberto/dfs-data-tooling | fe1be744b343f18d845ddc20f2303d1dbf0d90c5 | [
"MIT"
] | null | null | null | dfs/datasheets/datatabs/__init__.py | shoeberto/dfs-data-tooling | fe1be744b343f18d845ddc20f2303d1dbf0d90c5 | [
"MIT"
] | null | null | null | import dfs.datasheets.datatabs.general
import dfs.datasheets.datatabs.witnesstree
import dfs.datasheets.datatabs.cover
import dfs.datasheets.datatabs.sapling
import dfs.datasheets.datatabs.tree
import dfs.datasheets.datatabs.seedling
import dfs.datasheets.datatabs.notes | 38.571429 | 42 | 0.874074 | 35 | 270 | 6.742857 | 0.314286 | 0.266949 | 0.563559 | 0.800847 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.048148 | 270 | 7 | 43 | 38.571429 | 0.918288 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
f3ca647e2d0cc89b35fa8cbd7e2598cafb0ac28a | 59,009 | py | Python | flowcelltool/flowcells/tests/test_views.py | bihealth/flowcelltool | 6e16190fc34c54d834ecd23888a462f3af47611d | [
"MIT"
] | 7 | 2016-10-12T12:56:09.000Z | 2020-10-27T17:08:09.000Z | flowcelltool/flowcells/tests/test_views.py | iamh2o/flowcelltool | 6e16190fc34c54d834ecd23888a462f3af47611d | [
"MIT"
] | 94 | 2016-10-24T06:28:31.000Z | 2018-08-06T10:35:13.000Z | flowcelltool/flowcells/tests/test_views.py | iamh2o/flowcelltool | 6e16190fc34c54d834ecd23888a462f3af47611d | [
"MIT"
] | 1 | 2022-03-23T15:57:16.000Z | 2022-03-23T15:57:16.000Z | # -*- coding: utf-8 -*-
"""Tests for the views from the flowcelltools Django app
"""
import datetime
import io
import textwrap
from unittest.mock import patch, ANY
from test_plus.test import TestCase
from django.urls import reverse
from django.forms.models import model_to_dict
from django.test import Client
from django.contrib.contenttypes.models import ContentType
from .. import models
from ..models import SequencingMachine, FlowCell, BarcodeSet, \
BarcodeSetEntry, Library
from ...threads import models as threads_models
from .test_models import SequencingMachineMixin, FlowCellMixin, \
BarcodeSetMixin, BarcodeSetEntryMixin, LibraryMixin
# Helper Classes ---------------------------------------------------------
class SuperUserTestCase(TestCase):
    """Test case base whose created users are superusers by default."""

    def make_user(self, *args, **kwargs):
        """Create a user (default ``testuser``/``password``) as superuser."""
        for key, default in (('username', 'testuser'),
                             ('password', 'password')):
            kwargs.setdefault(key, default)
        result = super().make_user(*args, **kwargs)
        result.is_superuser = True
        result.save()
        return result
# FlowCell related -------------------------------------------------------
class TestFlowCellListView(
        SuperUserTestCase, FlowCellMixin, SequencingMachineMixin):
    """Tests for the flow cell list view."""

    def setUp(self):
        self.user = self.make_user()
        self.machine = self._make_machine()
        self.flow_cell = self._make_flow_cell(
            self.user, datetime.date(2016, 3, 3), self.machine, 815, 'A',
            'BCDEFGHIXX', 'LABEL', 8, models.FLOWCELL_STATUS_SEQ_COMPLETE,
            'John Doe', True, 1, models.RTA_VERSION_V2, 151, 'Description')

    def test_render(self):
        """Simply test that rendering the list view works"""
        with self.login(self.user):
            response = self.client.get(reverse('flowcell_list'))
            self.assertEqual(response.status_code, 200)
            self.assertEqual(len(response.context['flowcell_list']), 1)
class TestFlowCellCreateView(
        SuperUserTestCase, FlowCellMixin, SequencingMachineMixin):
    """Tests for creating flow cells through the create view."""

    def setUp(self):
        # Patch out email sending so no mail goes out and the call can be
        # asserted on.
        self.email_patcher = patch(
            'flowcelltool.flowcells.emails.email_flowcell_created')
        self.email_mock = self.email_patcher.start()
        self.user = self.make_user()
        self.machine = self._make_machine()
        self.flow_cell_name = '160303_{}_0815_A_BCDEFGHIXX_LABEL'.format(
            self.machine.vendor_id)
        self.client = Client()

    def tearDown(self):
        self.email_patcher.stop()

    def test_render(self):
        """Simply test that post inserts a new flow cell and redirects to the
        list view
        """
        # Check precondition
        self.assertEqual(FlowCell.objects.all().count(), 0)
        # Simulate POST request
        values = {
            'name': self.flow_cell_name,
            'num_lanes': 8,
            'status': models.FLOWCELL_STATUS_INITIAL,
            'operator': 'John Doe',
            'is_paired': True,
            'index_read_count': 1,
            'rta_version': models.RTA_VERSION_V2,
            'read_length': 151,
        }
        # Simulate the POST
        with self.login(self.user):
            response = self.client.post(reverse('flowcell_create'), values)
        # Check resulting database state
        self.assertEqual(FlowCell.objects.all().count(), 1)
        flow_cell = FlowCell.objects.all()[0]
        self.assertIsNotNone(flow_cell)
        # NOTE: the original dict had a duplicate 'vendor_id' key ('' first,
        # then 'BCDEFGHIXX'); the first entry was dead and has been removed.
        EXPECTED = {
            'id': flow_cell.pk,
            'description': '',
            'owner': self.user.pk,
            'num_lanes': 8,
            'status': models.FLOWCELL_STATUS_INITIAL,
            'operator': 'John Doe',
            'demux_operator': None,
            'info_adapters': None,
            'info_quality_scores': None,
            'info_final_reads': None,
            'info_planned_reads': None,
            'rta_version': models.RTA_VERSION_V2,
            'sequencing_machine': self.machine.pk,
            'run_date': datetime.date(2016, 3, 3),
            'run_number': 815,
            'label': 'LABEL',
            'slot': 'A',
            'vendor_id': 'BCDEFGHIXX',
        }
        self.assertEqual(model_to_dict(flow_cell), EXPECTED)
        # Check call to sending emails
        self.email_mock.assert_called_once_with(self.user, flow_cell, ANY)
        # Check resulting response
        with self.login(self.user):
            self.assertRedirects(
                response, reverse('flowcell_view', kwargs={'uuid': flow_cell.uuid}))
class TestFlowCellDetailView(
        SuperUserTestCase, FlowCellMixin, SequencingMachineMixin):
    """Tests for the flow cell detail view."""

    def setUp(self):
        self.user = self.make_user()
        self.machine = self._make_machine()
        self.client = Client()
        self.flow_cell = self._make_flow_cell(
            self.user, datetime.date(2016, 3, 3), self.machine, 815, 'A',
            'BCDEFGHIXX', 'LABEL', 8, models.FLOWCELL_STATUS_SEQ_COMPLETE,
            'John Doe', True, 1, models.RTA_VERSION_V2, 151, 'Description')

    def test_render(self):
        """Simply test that rendering the detail view works"""
        # Simulate the GET
        with self.login(self.user):
            response = self.client.get(
                reverse('flowcell_view', kwargs={'uuid': self.flow_cell.uuid}))
        # Check response
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['object'].pk,
                         self.flow_cell.pk)
class TestFlowCellUpdateView(
        SuperUserTestCase, FlowCellMixin, SequencingMachineMixin):
    """Tests for updating flow cells through the update view."""

    def setUp(self):
        # Patch out email sending so no mail goes out and the call can be
        # asserted on.
        self.email_patcher = patch(
            'flowcelltool.flowcells.emails.email_flowcell_updated')
        self.email_mock = self.email_patcher.start()
        self.user = self.make_user()
        self.machine = self._make_machine()
        self.client = Client()
        self.flow_cell = self._make_flow_cell(
            self.user, datetime.date(2016, 3, 3), self.machine, 815, 'A',
            'BCDEFGHIXX', 'LABEL', 8, models.FLOWCELL_STATUS_SEQ_COMPLETE,
            'John Doe', True, 1, models.RTA_VERSION_V2, 151, 'Description')

    def tearDown(self):
        self.email_patcher.stop()

    def test_render(self):
        """Test that the flow cell update POST works"""
        # Check precondition
        self.assertEqual(FlowCell.objects.all().count(), 1)
        # Simulate POST request
        values = model_to_dict(self.flow_cell)
        values['demux_operator'] = ''
        values['name'] = self.flow_cell.get_full_name() + 'YADAYADAYADA'
        values['status'] = models.FLOWCELL_STATUS_DEMUX_COMPLETE
        # Simulate the POST
        with self.login(self.user):
            response = self.client.post(
                reverse('flowcell_update', kwargs={'uuid': self.flow_cell.uuid}),
                values)
        # Check resulting database state
        self.assertEqual(FlowCell.objects.all().count(), 1)
        flow_cell = FlowCell.objects.all()[0]
        self.assertIsNotNone(flow_cell)
        # NOTE(review): info_planned_reads is expected to be populated after
        # the update POST -- presumably filled in by the update view; confirm
        # against the view implementation.
        EXPECTED = {
            'id': flow_cell.pk,
            'label': 'LABELYADAYADAYADA',
            'description': 'Description',
            'owner': self.user.pk,
            'num_lanes': 8,
            'status': models.FLOWCELL_STATUS_DEMUX_COMPLETE,
            'operator': 'John Doe',
            'demux_operator': None,
            'info_adapters': None,
            'info_quality_scores': None,
            'info_final_reads': None,
            'info_planned_reads': [
                {'is_indexed_read': False, 'num_cycles': 151, 'number': 1},
                {'is_indexed_read': False, 'num_cycles': 151, 'number': 2},
                {'is_indexed_read': True, 'num_cycles': 8, 'number': 3},
            ],
            'rta_version': models.RTA_VERSION_V2,
            'sequencing_machine': self.machine.pk,
            'slot': 'A',
            'run_number': 815,
            'vendor_id': 'BCDEFGHIXX',
            'run_date': datetime.date(2016, 3, 3),
        }
        self.maxDiff = None
        self.assertEqual(model_to_dict(flow_cell), EXPECTED)
        # Check call to sending emails
        self.email_mock.assert_called_once_with(self.user, flow_cell, ANY)
        # Check resulting response
        with self.login(self.user):
            self.assertRedirects(
                response, reverse('flowcell_view', kwargs={'uuid': flow_cell.uuid}))
class TestFlowCellDeleteView(
        SuperUserTestCase, FlowCellMixin, SequencingMachineMixin):
    """Tests for deleting flow cells through the delete view."""

    def setUp(self):
        self._set_up_mock()
        self.user = self.make_user()
        self.machine = self._make_machine()
        self.client = Client()
        self.flow_cell = self._make_flow_cell(
            self.user, datetime.date(2016, 3, 3), self.machine, 815, 'A',
            'BCDEFGHIXX', 'LABEL', 8, models.FLOWCELL_STATUS_SEQ_COMPLETE,
            'John Doe', True, 1, models.RTA_VERSION_V2, 151, 'Description')

    def _set_up_mock(self):
        # The argument FlowCell is passed after deletion and thus it is not
        # equal to self.flow_cell any more.  Thus, the trickery with storing
        # it in an attribute of the test via the mock's side effect.
        self.arg_flowcell = None
        self.email_patcher = patch(
            'flowcelltool.flowcells.emails.email_flowcell_deleted')
        self.email_mock = self.email_patcher.start()

        def save_flowcell(user, flowcell):
            # Capture the flow cell handed to the email helper for later
            # comparison in test_render.
            self.arg_flowcell = flowcell

        self.email_mock.side_effect = save_flowcell

    def tearDown(self):
        self.email_patcher.stop()

    def test_render(self):
        """Test that the flow cell delete POST works"""
        # Check precondition
        self.assertEqual(FlowCell.objects.all().count(), 1)
        # Simulate the POST
        with self.login(self.user):
            response = self.client.post(
                reverse('flowcell_delete', kwargs={'uuid': self.flow_cell.uuid}))
        # Check resulting database state
        self.assertEqual(FlowCell.objects.all().count(), 0)
        # Check call to sending emails
        self.email_mock.assert_called_once_with(self.user, ANY)
        # Compare without pk: the deleted instance's pk is reset.
        m1 = model_to_dict(self.arg_flowcell)
        del m1['id']
        m2 = model_to_dict(self.flow_cell)
        del m2['id']
        self.assertEqual(m1, m2)
        # Check resulting response
        with self.login(self.user):
            self.assertRedirects(
                response, reverse('flowcell_list'))
class TestLibraryUpdateView(
        SuperUserTestCase, FlowCellMixin, SequencingMachineMixin, LibraryMixin,
        BarcodeSetMixin, BarcodeSetEntryMixin):
    """Tests for the formset-based library update view of a flow cell.

    Covers updating, adding, and deleting libraries as well as prefilling
    the formset with barcode sets via GET parameters.
    """

    def setUp(self):
        self.user = self.make_user()
        self.client = Client()
        # Create Machine
        self.machine = self._make_machine()
        # Create Barcode set
        self.barcode_set = self._make_barcode_set()
        self.barcode1 = self._make_barcode_set_entry(
            self.barcode_set, 'AR01', 'CGATCGAT')
        self.barcode2 = self._make_barcode_set_entry(
            self.barcode_set, 'AR02', 'ATTATATA')
        # Create Flow cell
        self.flow_cell = self._make_flow_cell(
            self.user, datetime.date(2016, 3, 3), self.machine, 815, 'A',
            'BCDEFGHIXX', 'LABEL', 8, models.FLOWCELL_STATUS_SEQ_COMPLETE,
            'John Doe', True, 1, models.RTA_VERSION_V2, 151, 'Description')
        self.library1 = self._make_library(
            self.flow_cell, 'LIB_001', models.REFERENCE_HUMAN,
            self.barcode_set, self.barcode1, [1, 2], None, None)
        self.library2 = self._make_library(
            self.flow_cell, 'LIB_002', models.REFERENCE_HUMAN,
            self.barcode_set, self.barcode2, [1, 2], None, None)

    def _test_update(self, more_values):
        """Helper for testing the update functionality

        POSTs a two-form formset updating both libraries; ``more_values``
        selects the submit button and thus the redirect target checked by
        the caller.  Returns the response.
        """
        # Check precondition
        self.assertEqual(FlowCell.objects.all().count(), 1)
        self.assertEqual(Library.objects.all().count(), 2)
        values = {
            'form-TOTAL_FORMS': '2',
            'form-INITIAL_FORMS': '2',
            'form-MIN_NUM_FORMS': '0',
            'form-MAX_NUM_FORMS': '1',
            'form-0-id': self.library1.pk,
            'form-0-name': 'UPDATED',
            'form-0-reference': 'mm9',
            'form-0-barcode_set': self.library1.barcode_set.uuid,
            'form-0-barcode': self.library1.barcode.uuid,
            'form-0-barcode_set2': '',
            'form-0-barcode2': '',
            'form-0-lane_numbers': ','.join(
                map(str, self.library1.lane_numbers)),
            'form-1-id': self.library2.pk,
            'form-1-name': 'UPDATED_2',
            'form-1-reference': self.library2.reference,
            'form-1-barcode_set': self.library2.barcode_set.uuid,
            'form-1-barcode': self.library2.barcode.uuid,
            'form-1-barcode_set2': '',
            'form-1-barcode2': '',
            'form-1-lane_numbers': ','.join(
                map(str, self.library2.lane_numbers)),
        }
        values.update(more_values)
        # Simulate the POST
        with self.login(self.user):
            response = self.client.post(
                reverse('flowcell_updatelibraries',
                        kwargs={'uuid': self.flow_cell.uuid}),
                values)
        # Check resulting database state
        self.assertEqual(BarcodeSet.objects.all().count(), 1)
        self.assertEqual(BarcodeSetEntry.objects.all().count(), 2)
        library1 = Library.objects.get(pk=self.library1.pk)
        self.assertEquals(library1.name, 'UPDATED')
        self.assertEquals(library1.reference, 'mm9')
        self.assertEquals(library1.barcode_set, self.barcode_set)
        self.assertEquals(library1.barcode, self.barcode1)
        self.assertEquals(library1.barcode_set2, None)
        self.assertEquals(library1.barcode2, None)
        self.assertEquals(library1.lane_numbers, [1, 2])
        library2 = Library.objects.get(pk=self.library2.pk)
        self.assertEquals(library2.name, 'UPDATED_2')
        self.assertEquals(library2.reference, self.library2.reference)
        self.assertEquals(library2.barcode_set, self.barcode_set)
        self.assertEquals(library2.barcode, self.barcode2)
        self.assertEquals(library2.barcode_set2, None)
        self.assertEquals(library2.barcode2, None)
        self.assertEquals(library2.lane_numbers, [1, 2])
        return response

    def test_update(self):
        """Test that updating library entries works correctly"""
        response = self._test_update({'submit': 'submit'})
        # Check resulting response
        with self.login(self.user):
            self.assertRedirects(
                response, reverse('flowcell_view',
                                  kwargs={'uuid': self.flow_cell.uuid}))

    def test_update_more(self):
        """Test that updating library entries works correctly (submit more)"""
        response = self._test_update({'submit_more': 'submit_more'})
        # Check resulting response: "submit more" stays on the update form.
        with self.login(self.user):
            self.assertRedirects(
                response, reverse('flowcell_updatelibraries',
                                  kwargs={'uuid': self.flow_cell.uuid}))

    def test_add(self):
        """Test that adding libraries works correctly"""
        # Check precondition
        self.assertEqual(FlowCell.objects.all().count(), 1)
        self.assertEqual(Library.objects.all().count(), 2)
        # Two unchanged existing forms plus one new form (form-2).
        values = {
            'form-TOTAL_FORMS': '3',
            'form-INITIAL_FORMS': '2',
            'form-MIN_NUM_FORMS': '0',
            'form-MAX_NUM_FORMS': '2',
            'form-0-id': self.library1.pk,
            'form-0-name': self.library1.name,
            'form-0-reference': self.library1.reference,
            'form-0-barcode_set': self.library1.barcode_set.uuid,
            'form-0-barcode': self.library1.barcode.uuid,
            'form-0-barcode_set2': '',
            'form-0-barcode2': '',
            'form-0-lane_numbers': ','.join(
                map(str, self.library1.lane_numbers)),
            'form-1-id': self.library2.pk,
            'form-1-name': self.library2.name,
            'form-1-reference': self.library2.reference,
            'form-1-barcode_set': self.library2.barcode_set.uuid,
            'form-1-barcode': self.library2.barcode.uuid,
            'form-1-barcode_set2': '',
            'form-1-barcode2': '',
            'form-1-lane_numbers': ','.join(
                map(str, self.library2.lane_numbers)),
            'form-2-name': 'LIB_003',
            'form-2-reference': 'hg19',
            'form-2-barcode_set': self.library2.barcode_set.uuid,
            'form-2-barcode': self.library2.barcode.uuid,
            'form-2-barcode_set2': '',
            'form-2-barcode2': '',
            'form-2-lane_numbers': '5,6',
        }
        # Ensure that no such barcode exists yet
        self.assertEquals(
            Library.objects.filter(name='LIB_003').count(), 0)
        # Simulate the POST
        with self.login(self.user):
            response = self.client.post(
                reverse('flowcell_updatelibraries',
                        kwargs={'uuid': self.flow_cell.uuid}),
                values)
        # Check resulting database state
        self.assertEqual(FlowCell.objects.all().count(), 1)
        self.assertEqual(Library.objects.all().count(), 3)
        library1 = Library.objects.get(pk=self.library1.pk)
        self.assertEquals(library1.name, self.library1.name)
        library2 = Library.objects.get(pk=self.library2.pk)
        self.assertEquals(library2.name, self.library2.name)
        # Newly created library
        self.assertEquals(
            Library.objects.filter(name='LIB_003').count(), 1)
        library3 = Library.objects.filter(name='LIB_003')[0]
        self.assertEquals(library3.name, 'LIB_003')
        self.assertEquals(library3.reference, 'hg19')
        self.assertEquals(library3.barcode_set, self.barcode_set)
        self.assertEquals(library3.barcode, self.barcode2)
        self.assertEquals(library3.barcode_set2, None)
        self.assertEquals(library3.barcode2, None)
        self.assertEquals(library3.lane_numbers, [5, 6])
        # Check resulting response
        with self.login(self.user):
            self.assertRedirects(
                response, reverse('flowcell_view',
                                  kwargs={'uuid': self.flow_cell.uuid}))

    def test_delete(self):
        """Test that deleting libraries works correctly"""
        # Check precondition
        self.assertEqual(FlowCell.objects.all().count(), 1)
        self.assertEqual(Library.objects.all().count(), 2)
        # form-1 is marked for deletion via form-1-DELETE.
        values = {
            'form-TOTAL_FORMS': '2',
            'form-INITIAL_FORMS': '2',
            'form-MIN_NUM_FORMS': '0',
            'form-MAX_NUM_FORMS': '1',
            'form-0-id': self.library1.pk,
            'form-0-name': 'UPDATED',
            'form-0-reference': 'mm9',
            'form-0-barcode_set': self.library1.barcode_set.uuid,
            'form-0-barcode': self.library1.barcode.uuid,
            'form-0-barcode_set2': '',
            'form-0-barcode2': '',
            'form-0-lane_numbers': ','.join(
                map(str, self.library1.lane_numbers)),
            'form-1-id': self.library2.pk,
            'form-1-name': 'UPDATED_2',
            'form-1-reference': self.library2.reference,
            'form-1-barcode_set': self.library2.barcode_set.uuid,
            'form-1-barcode': '',
            'form-1-barcode_set2': '',
            'form-1-barcode2': '',
            'form-1-lane_numbers': ','.join(
                map(str, self.library2.lane_numbers)),
            'form-1-DELETE': 'on',
        }
        # Simulate the POST
        with self.login(self.user):
            response = self.client.post(
                reverse('flowcell_updatelibraries',
                        kwargs={'uuid': self.flow_cell.uuid}),
                values)
        # Check resulting database state
        self.assertEqual(FlowCell.objects.all().count(), 1)
        self.assertEqual(Library.objects.all().count(), 1)
        library1 = Library.objects.get(pk=self.library1.pk)
        self.assertEquals(library1.name, 'UPDATED')
        self.assertEquals(
            Library.objects.filter(pk=self.library2.pk).count(), 0)
        # Check resulting response
        with self.login(self.user):
            self.assertRedirects(
                response, reverse('flowcell_view',
                                  kwargs={'uuid': self.flow_cell.uuid}))

    def test_prefill_form_first(self):
        """Test that prefilling the form with barcode1 works correctly"""
        with self.login(self.user):
            response = self.client.get(
                reverse('flowcell_updatelibraries',
                        kwargs={'uuid': self.flow_cell.uuid}),
                {'barcode1': self.barcode_set.uuid})
        # Only the extra (empty) forms beyond the two existing libraries
        # should be prefilled.
        for form in response.context['formset'].forms[2:]:
            self.assertEquals(form.initial['barcode_set'], self.barcode_set.uuid)
            self.assertEquals(form.initial['barcode_set2'], None)

    def test_prefill_form_second(self):
        """Test that prefilling the form with barcode2 works correctly"""
        with self.login(self.user):
            response = self.client.get(
                reverse('flowcell_updatelibraries',
                        kwargs={'uuid': self.flow_cell.uuid}),
                {'barcode2': self.barcode_set.uuid})
        for form in response.context['formset'].forms[2:]:
            self.assertEquals(form.initial['barcode_set'], None)
            self.assertEquals(form.initial['barcode_set2'], self.barcode_set.uuid)

    def test_prefill_form_both(self):
        """Test that prefilling the form with barcode1+2 works correctly"""
        with self.login(self.user):
            response = self.client.get(
                reverse('flowcell_updatelibraries',
                        kwargs={'uuid': self.flow_cell.uuid}),
                {'barcode1': self.barcode_set.uuid,
                 'barcode2': self.barcode_set.uuid})
        for form in response.context['formset'].forms[2:]:
            self.assertEquals(form.initial['barcode_set'], self.barcode_set.uuid)
            self.assertEquals(form.initial['barcode_set2'], self.barcode_set.uuid)
# SequencingMachine related ----------------------------------------------
class TestSequencingMachineListView(
        SuperUserTestCase, SequencingMachineMixin):
    """Tests for the sequencing machine list view."""

    def setUp(self):
        self.client = Client()
        self.user = self.make_user()
        self.machine = self._make_machine()

    def test_render(self):
        """Simply test that rendering the list view works"""
        with self.login(self.user):
            resp = self.client.get(reverse('instrument_list'))
            self.assertEqual(resp.status_code, 200)
            self.assertEqual(len(resp.context['object_list']), 1)
class TestSequencingMachineCreateView(SuperUserTestCase):
    """Tests for creating sequencing machines through the create view."""

    def setUp(self):
        self.user = self.make_user()

    def test_render(self):
        """Simply test that post inserts a new flow cell and redirects to the
        list view
        """
        # Check precondition
        self.assertEqual(SequencingMachine.objects.all().count(), 0)
        # Simulate POST request
        values = {
            'vendor_id': 'NS5001234',
            'label': 'NextSeq#1',
            'description': 'In corner of lab 101',
            'machine_model': models.MACHINE_MODEL_NEXTSEQ500,
            'slot_count': 1,
            'dual_index_workflow': models.INDEX_WORKFLOW_A,
        }
        # Simulate the POST
        with self.login(self.user):
            response = self.client.post(reverse('instrument_create'), values)
        # Check resulting database state
        self.assertEqual(SequencingMachine.objects.all().count(), 1)
        instrument = SequencingMachine.objects.all()[0]
        self.assertIsNotNone(instrument)
        EXPECTED = {
            'id': instrument.pk,
            'vendor_id': 'NS5001234',
            'label': 'NextSeq#1',
            'description': 'In corner of lab 101',
            'machine_model': models.MACHINE_MODEL_NEXTSEQ500,
            'slot_count': 1,
            'dual_index_workflow': models.INDEX_WORKFLOW_A,
        }
        self.assertEqual(model_to_dict(instrument), EXPECTED)
        # Check resulting response
        with self.login(self.user):
            self.assertRedirects(
                response, reverse('instrument_view', kwargs={'uuid': instrument.uuid}))
class TestSequencingMachineDetailView(
        SuperUserTestCase, SequencingMachineMixin):
    """Tests for the sequencing machine detail view."""

    def setUp(self):
        self.user = self.make_user()
        self.machine = self._make_machine()
        self.client = Client()

    def test_render(self):
        """Simply test that rendering the detail view works"""
        # Simulate the GET
        with self.login(self.user):
            response = self.client.get(
                reverse('instrument_view', kwargs={'uuid': self.machine.uuid}))
        # Check response
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['object'].pk,
                         self.machine.pk)
class TestSequencingMachineUpdateView(
        SuperUserTestCase, SequencingMachineMixin):
    """Tests for updating sequencing machines through the update view."""

    def setUp(self):
        self.user = self.make_user()
        self.machine = self._make_machine()
        self.client = Client()

    def test_render(self):
        """Test that the instrument update POST works"""
        # Check precondition
        self.assertEqual(SequencingMachine.objects.all().count(), 1)
        # Simulate POST request: change vendor_id and machine_model only.
        values = model_to_dict(self.machine)
        values['vendor_id'] = values['vendor_id'] + 'YADAYADAYADA'
        values['machine_model'] = models.MACHINE_MODEL_HISEQ1000
        # Simulate the POST
        with self.login(self.user):
            response = self.client.post(
                reverse('instrument_update', kwargs={'uuid': self.machine.uuid}),
                values)
        # Check resulting database state
        self.assertEqual(SequencingMachine.objects.all().count(), 1)
        machine = SequencingMachine.objects.all()[0]
        self.assertIsNotNone(machine)
        EXPECTED = {
            'id': machine.pk,
            'vendor_id': values['vendor_id'],
            'label': 'NextSeq#1',
            'description': 'In corner of lab 101',
            'machine_model': models.MACHINE_MODEL_HISEQ1000,
            'slot_count': 1,
            'dual_index_workflow': models.INDEX_WORKFLOW_A,
        }
        self.assertEqual(model_to_dict(machine), EXPECTED)
        # Check resulting response
        with self.login(self.user):
            self.assertRedirects(
                response, reverse('instrument_view', kwargs={'uuid': machine.uuid}))
class TestSequencingMachineDeleteView(
        SuperUserTestCase, SequencingMachineMixin):
    """Tests for deleting sequencing machines through the delete view."""

    def setUp(self):
        self.client = Client()
        self.user = self.make_user()
        self.machine = self._make_machine()

    def test_render(self):
        """Test that the instrument delete POST works"""
        # Precondition: exactly one machine exists.
        self.assertEqual(SequencingMachine.objects.all().count(), 1)
        delete_url = reverse(
            'instrument_delete', kwargs={'uuid': self.machine.uuid})
        # Issue the delete POST.
        with self.login(self.user):
            resp = self.client.post(delete_url)
        # The machine must be gone afterwards.
        self.assertEqual(SequencingMachine.objects.all().count(), 0)
        # And the client is redirected back to the list view.
        with self.login(self.user):
            self.assertRedirects(
                resp, reverse('instrument_list'))
class TestFlowCellSetExportView(
        SuperUserTestCase, LibraryMixin, SequencingMachineMixin, FlowCellMixin,
        BarcodeSetEntryMixin, BarcodeSetMixin):
    """Tests for exporting a flow cell (with libraries) to JSON."""

    def setUp(self):
        self.user = self.make_user()
        self.machine = self._make_machine()
        self.barcode_set = self._make_barcode_set()
        self.barcode = self._make_barcode_set_entry(self.barcode_set)
        self.barcode2 = self._make_barcode_set_entry(
            self.barcode_set, 'AR02', 'CGATATA')
        self.flow_cell = self._make_flow_cell(
            self.user, datetime.date(2016, 3, 3), self.machine, 815, 'A',
            'BCDEFGHIXX', 'LABEL', 8, models.FLOWCELL_STATUS_SEQ_COMPLETE,
            'John Doe', True, 1, models.RTA_VERSION_V2, 151, 'Description')
        self.library = self._make_library(
            self.flow_cell, 'LIB_001', models.REFERENCE_HUMAN,
            self.barcode_set, self.barcode, [1, 2],
            self.barcode_set, self.barcode2)

    def test_render(self):
        """Test that the export view returns the expected JSON document."""
        # Simulate the GET
        with self.login(self.user):
            response = self.client.get(
                reverse('flowcell_export',
                        kwargs={'uuid': self.flow_cell.uuid}))
        # Check response
        self.assertEqual(response.status_code, 200)
        # NOTE(review): this literal must match the serializer output
        # byte-for-byte, including indentation -- confirm the JSON indent
        # width against the export view before relying on this expectation.
        EXPECTED = textwrap.dedent(r"""
            {
              "run_date": "2016-03-03",
              "sequencing_machine": "NS5001234",
              "run_number": 815,
              "slot": "A",
              "vendor_id": "BCDEFGHIXX",
              "label": "LABEL",
              "description": "Description",
              "num_lanes": 8,
              "status": "seq_complete",
              "operator": "John Doe",
              "is_paired": true,
              "index_read_count": 1,
              "rta_version": 2,
              "read_length": 151,
              "libraries": [
                {
                  "name": "LIB_001",
                  "reference": "hg19",
                  "barcode_set": "SureSelectTest",
                  "barcode_name": "AR01",
                  "barcode_sequence": "ACGTGTTA",
                  "barcode_set2": "SureSelectTest",
                  "barcode_name2": "AR02",
                  "barcode_sequence2": "CGATATA",
                  "lane_numbers": [
                    1,
                    2
                  ]
                }
              ]
            }
            """).lstrip()
        self.maxDiff = None
        self.assertEqual(response.content.decode('utf-8'), EXPECTED)
class TestFlowCellImportView(
        SuperUserTestCase, SequencingMachineMixin,
        BarcodeSetEntryMixin, BarcodeSetMixin):
    """Tests for importing a flow cell (with libraries) from a JSON file."""

    def setUp(self):
        self.user = self.make_user()
        self.client = Client()
        # Machine and barcodes referenced by name in the JSON payload must
        # exist before the import.
        self.machine = self._make_machine()
        self.barcode_set = self._make_barcode_set()
        self.barcode = self._make_barcode_set_entry(self.barcode_set)
        self.barcode2 = self._make_barcode_set_entry(
            self.barcode_set, 'AR02', 'CGATATA')

    def test_render(self):
        """Test that uploading a JSON export creates flow cell + library."""
        # Prepare payload to post
        payload = io.StringIO(textwrap.dedent(r"""
            {
              "run_date": "2016-03-03",
              "sequencing_machine": "NS5001234",
              "run_number": 815,
              "slot": "A",
              "vendor_id": "BCDEFGHIXX",
              "label": "LABEL",
              "description": "Description",
              "num_lanes": 8,
              "status": "seq_complete",
              "operator": "John Doe",
              "is_paired": true,
              "index_read_count": 1,
              "rta_version": 2,
              "read_length": 151,
              "libraries": [
                {
                  "name": "LIB_001",
                  "reference": "hg19",
                  "barcode_set": "SureSelectTest",
                  "barcode_name": "AR01",
                  "barcode_sequence": "ACGTGTTA",
                  "barcode_set2": "SureSelectTest",
                  "barcode_name2": "AR02",
                  "barcode_sequence2": "CGATATA",
                  "lane_numbers": [
                    1,
                    2
                  ]
                }
              ]
            }
            """).lstrip())
        # Check precondition
        self.assertEqual(FlowCell.objects.all().count(), 0)
        self.assertEqual(Library.objects.all().count(), 0)
        # Simulate the POST
        with self.login(self.user):
            response = self.client.post(
                reverse('flowcell_import'),
                {'json_file': payload})
        # Check response: redirects to the newly created flow cell.
        flowcell = FlowCell.objects.order_by('-created')[0]
        with self.login(self.user):
            self.assertRedirects(
                response, reverse('flowcell_view',
                                  kwargs={'uuid': flowcell.uuid}))
        # Check database state afterwards
        self.assertEqual(FlowCell.objects.all().count(), 1)
        self.assertEqual(Library.objects.all().count(), 1)
# BarcodeSet related -----------------------------------------------------
class TestBarcodeSetListView(
        SuperUserTestCase, BarcodeSetMixin, BarcodeSetEntryMixin):
    """Smoke test for the barcode set list view."""

    def setUp(self):
        # One barcode set (with two entries) is enough to populate the list.
        self.user = self.make_user()
        self.barcode_set = self._make_barcode_set()
        self.barcode1 = self._make_barcode_set_entry(
            self.barcode_set, 'AR01', 'CGATCGAT')
        self.barcode2 = self._make_barcode_set_entry(
            self.barcode_set, 'AR02', 'ATTATATA')
        self.client = Client()

    def test_render(self):
        """The list view renders and shows exactly the one barcode set."""
        list_url = reverse('barcodeset_list')
        with self.login(self.user):
            resp = self.client.get(list_url)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(len(resp.context['object_list']), 1)
class TestBarcodeSetCreateView(SuperUserTestCase):
    """Tests for creating a barcode set through the create view."""

    def setUp(self):
        self.user = self.make_user()

    def test_render(self):
        """POST inserts a new barcode set and redirects to its detail view."""
        # Precondition: nothing in the database yet.
        self.assertEqual(BarcodeSet.objects.all().count(), 0)
        form_values = {
            'name': 'some_barcodes',
            'short_name': 'SBS',
            'description': 'Some barcode set',
        }
        with self.login(self.user):
            resp = self.client.post(reverse('barcodeset_create'), form_values)
        # Exactly one matching record must have been created.
        self.assertEqual(
            BarcodeSet.objects.filter(name='some_barcodes').count(), 1)
        barcode_set = BarcodeSet.objects.filter(name='some_barcodes')[0]
        self.assertIsNotNone(barcode_set)
        expected = {
            'id': barcode_set.pk,
            'name': 'some_barcodes',
            'short_name': 'SBS',
            'description': 'Some barcode set',
        }
        self.assertEqual(model_to_dict(barcode_set), expected)
        # The view must redirect to the new record's detail page.
        with self.login(self.user):
            self.assertRedirects(
                resp,
                reverse('barcodeset_view',
                        kwargs={'uuid': barcode_set.uuid}))
class TestBarcodeSetDetailView(
        SuperUserTestCase, BarcodeSetMixin, BarcodeSetEntryMixin):
    """Smoke test for the barcode set detail view."""

    def setUp(self):
        self.user = self.make_user()
        self.barcode_set = self._make_barcode_set()
        self.barcode1 = self._make_barcode_set_entry(
            self.barcode_set, 'AR01', 'CGATCGAT')
        self.barcode2 = self._make_barcode_set_entry(
            self.barcode_set, 'AR02', 'ATTATAAA')
        self.client = Client()

    def test_render(self):
        """The detail view renders with the barcode set as context object."""
        detail_url = reverse('barcodeset_view',
                             kwargs={'uuid': self.barcode_set.uuid})
        with self.login(self.user):
            resp = self.client.get(detail_url)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.context['object'].pk, self.barcode_set.pk)
class TestBarcodeSetUpdateView(
        SuperUserTestCase, BarcodeSetMixin, BarcodeSetEntryMixin):
    """Tests for updating barcode set properties via POST."""

    def setUp(self):
        self.user = self.make_user()
        self.barcode_set = self._make_barcode_set()
        self.barcode1 = self._make_barcode_set_entry(
            self.barcode_set, 'AR01', 'CGATCGAT')
        self.barcode2 = self._make_barcode_set_entry(
            self.barcode_set, 'AR02', 'ATTATAAA')
        self.client = Client()

    def test_render(self):
        """POST to the update view changes name/description and redirects."""
        # Precondition: exactly the one set from setUp() exists.
        self.assertEqual(BarcodeSet.objects.all().count(), 1)
        # Build form values from the current state, then change two fields.
        form_values = model_to_dict(self.barcode_set)
        form_values['name'] = 'Another name'
        form_values['description'] = 'This is the description'
        update_url = reverse('barcodeset_update',
                             kwargs={'uuid': self.barcode_set.uuid})
        with self.login(self.user):
            resp = self.client.post(update_url, form_values)
        # Still exactly one record, now carrying the updated fields.
        self.assertEqual(BarcodeSet.objects.all().count(), 1)
        barcode_set = BarcodeSet.objects.get(pk=self.barcode_set.pk)
        expected = {
            'id': barcode_set.pk,
            'name': form_values['name'],
            'short_name': self.barcode_set.short_name,
            'description': form_values['description'],
        }
        self.assertEqual(model_to_dict(barcode_set), expected)
        # The view redirects back to the detail page.
        with self.login(self.user):
            self.assertRedirects(
                resp,
                reverse('barcodeset_view',
                        kwargs={'uuid': barcode_set.uuid}))
class TestBarcodeSetDeleteView(
        SuperUserTestCase, BarcodeSetMixin, BarcodeSetEntryMixin):
    """Tests for deleting a barcode set together with its entries."""

    def setUp(self):
        self.user = self.make_user()
        self.barcode_set = self._make_barcode_set()
        self.barcode1 = self._make_barcode_set_entry(
            self.barcode_set, 'AR01', 'CGATCGAT')
        self.barcode2 = self._make_barcode_set_entry(
            self.barcode_set, 'AR02', 'ATTATAAA')
        self.client = Client()

    def test_render(self):
        """POST to the delete view removes the set and all of its entries."""
        # Precondition: one set holding two entries.
        self.assertEqual(BarcodeSet.objects.all().count(), 1)
        self.assertEqual(BarcodeSetEntry.objects.all().count(), 2)
        delete_url = reverse('barcodeset_delete',
                             kwargs={'uuid': self.barcode_set.uuid})
        with self.login(self.user):
            resp = self.client.post(delete_url)
        # Both the set and its entries must be gone afterwards.
        self.assertEqual(BarcodeSet.objects.all().count(), 0)
        self.assertEqual(BarcodeSetEntry.objects.all().count(), 0)
        # The view redirects to the list page.
        with self.login(self.user):
            self.assertRedirects(resp, reverse('barcodeset_list'))
class TestBarcodeSetUpdateEntriesView(
        SuperUserTestCase, BarcodeSetMixin, BarcodeSetEntryMixin):
    """Tests for the formset view that updates, adds, and deletes barcode
    set entries in bulk.

    Fix: the deprecated ``assertEquals`` alias is replaced with
    ``assertEqual`` throughout (it emits DeprecationWarning and is removed
    in Python 3.12).
    """

    def setUp(self):
        self.user = self.make_user()
        self.barcode_set = self._make_barcode_set()
        self.barcode1 = self._make_barcode_set_entry(
            self.barcode_set, 'AR01', 'CGATCGAT')
        self.barcode2 = self._make_barcode_set_entry(
            self.barcode_set, 'AR02', 'ATTATAAA')
        self.client = Client()

    def _test_update(self, more_values):
        """Helper for testing the update functionality.

        Posts a two-row formset that renames barcode1 to UPDATED/GATTACA
        and leaves barcode2 unchanged, merges ``more_values`` (the submit
        button) into the payload, verifies the database state, and returns
        the response for redirect checks.
        """
        # Check precondition
        self.assertEqual(BarcodeSet.objects.all().count(), 1)
        self.assertEqual(BarcodeSetEntry.objects.all().count(), 2)
        values = {
            'form-TOTAL_FORMS': '2',
            'form-INITIAL_FORMS': '2',
            'form-MIN_NUM_FORMS': '0',
            'form-MAX_NUM_FORMS': '1',
            'form-0-id': self.barcode1.pk,
            'form-0-name': 'UPDATED',
            'form-0-sequence': 'GATTACA',
            'form-1-id': self.barcode2.pk,
            'form-1-name': self.barcode2.name,
            'form-1-sequence': self.barcode2.sequence,
        }
        values.update(more_values)
        # Simulate the POST
        with self.login(self.user):
            response = self.client.post(
                reverse('barcodeset_updateentries',
                        kwargs={'uuid': self.barcode_set.uuid}),
                values)
        # Check resulting database state: first entry updated, second kept.
        self.assertEqual(BarcodeSet.objects.all().count(), 1)
        self.assertEqual(BarcodeSetEntry.objects.all().count(), 2)
        barcode1 = BarcodeSetEntry.objects.get(pk=self.barcode1.pk)
        self.assertEqual(barcode1.name, 'UPDATED')
        self.assertEqual(barcode1.sequence, 'GATTACA')
        barcode2 = BarcodeSetEntry.objects.get(pk=self.barcode2.pk)
        self.assertEqual(barcode2.name, self.barcode2.name)
        self.assertEqual(barcode2.sequence, self.barcode2.sequence)
        return response

    def test_update(self):
        """Plain submit updates entries and redirects to the detail view."""
        response = self._test_update({'submit': 'submit'})
        # Check resulting response
        with self.login(self.user):
            self.assertRedirects(
                response, reverse('barcodeset_view',
                                  kwargs={'uuid': self.barcode_set.uuid}))

    def test_update_more(self):
        """'Submit and continue' redirects back to the entries form."""
        response = self._test_update({'submit_more': 'submit_more'})
        # Check resulting response
        with self.login(self.user):
            self.assertRedirects(
                response, reverse('barcodeset_updateentries',
                                  kwargs={'uuid': self.barcode_set.uuid}))

    def test_add(self):
        """Test that adding barcode set entries works correctly"""
        # Check precondition
        self.assertEqual(BarcodeSet.objects.all().count(), 1)
        self.assertEqual(BarcodeSetEntry.objects.all().count(), 2)
        # Three forms: two existing rows plus one new row (empty id).
        values = {
            'form-TOTAL_FORMS': '3',
            'form-INITIAL_FORMS': '2',
            'form-MIN_NUM_FORMS': '0',
            'form-MAX_NUM_FORMS': '2',
            'form-0-id': self.barcode1.pk,
            'form-0-name': 'UPDATED',
            'form-0-sequence': 'GATTACA',
            'form-1-id': self.barcode2.pk,
            'form-1-name': self.barcode2.name,
            'form-1-sequence': self.barcode2.sequence,
            'form-2-id': '',
            'form-2-name': 'AR03',
            'form-2-sequence': 'TAAATAAA',
        }
        # Ensure that no such barcode exists yet
        self.assertEqual(
            BarcodeSetEntry.objects.filter(sequence='TAAATAAA').count(), 0)
        # Simulate the POST
        with self.login(self.user):
            response = self.client.post(
                reverse('barcodeset_updateentries',
                        kwargs={'uuid': self.barcode_set.uuid}),
                values)
        # Check resulting database state: updated, kept, and newly added.
        self.assertEqual(BarcodeSet.objects.all().count(), 1)
        self.assertEqual(BarcodeSetEntry.objects.all().count(), 3)
        barcode1 = BarcodeSetEntry.objects.get(pk=self.barcode1.pk)
        self.assertEqual(barcode1.name, 'UPDATED')
        self.assertEqual(barcode1.sequence, 'GATTACA')
        barcode2 = BarcodeSetEntry.objects.get(pk=self.barcode2.pk)
        self.assertEqual(barcode2.name, self.barcode2.name)
        self.assertEqual(barcode2.sequence, self.barcode2.sequence)
        self.assertEqual(
            BarcodeSetEntry.objects.filter(sequence='TAAATAAA').count(), 1)
        barcode3 = BarcodeSetEntry.objects.filter(sequence='TAAATAAA')[0]
        self.assertEqual(barcode3.name, 'AR03')
        self.assertEqual(barcode3.sequence, 'TAAATAAA')
        # Check resulting response
        with self.login(self.user):
            self.assertRedirects(
                response, reverse('barcodeset_view',
                                  kwargs={'uuid': self.barcode_set.uuid}))

    def test_delete(self):
        """Test that deleting barcode set entries works correctly"""
        # Check precondition
        self.assertEqual(BarcodeSet.objects.all().count(), 1)
        self.assertEqual(BarcodeSetEntry.objects.all().count(), 2)
        # Second row is flagged for deletion via the formset DELETE field.
        values = {
            'form-TOTAL_FORMS': '2',
            'form-INITIAL_FORMS': '2',
            'form-MIN_NUM_FORMS': '0',
            'form-MAX_NUM_FORMS': '1',
            'form-0-id': self.barcode1.pk,
            'form-0-name': 'UPDATED',
            'form-0-sequence': 'GATTACA',
            'form-1-id': self.barcode2.pk,
            'form-1-name': self.barcode2.name,
            'form-1-sequence': self.barcode2.sequence,
            'form-1-DELETE': 'on',
        }
        # Simulate the POST
        with self.login(self.user):
            response = self.client.post(
                reverse('barcodeset_updateentries',
                        kwargs={'uuid': self.barcode_set.uuid}),
                values)
        # Check resulting database state: barcode2 removed, barcode1 updated.
        self.assertEqual(BarcodeSet.objects.all().count(), 1)
        self.assertEqual(BarcodeSetEntry.objects.all().count(), 1)
        barcode1 = BarcodeSetEntry.objects.get(pk=self.barcode1.pk)
        self.assertEqual(barcode1.name, 'UPDATED')
        self.assertEqual(barcode1.sequence, 'GATTACA')
        self.assertEqual(
            BarcodeSetEntry.objects.filter(pk=self.barcode2.pk).count(), 0)
        # Check resulting response
        with self.login(self.user):
            self.assertRedirects(
                response, reverse('barcodeset_view',
                                  kwargs={'uuid': self.barcode_set.uuid}))
class TestBarcodeSetExportView(
        SuperUserTestCase, BarcodeSetMixin, BarcodeSetEntryMixin):
    """Test the JSON export view for barcode sets."""

    def setUp(self):
        self.user = self.make_user()
        self.barcode_set = self._make_barcode_set()
        self.barcode1 = self._make_barcode_set_entry(
            self.barcode_set, 'AR01', 'CGATCGAT')
        self.barcode2 = self._make_barcode_set_entry(
            self.barcode_set, 'AR02', 'ATTATAAA')
        self.client = Client()

    def test_render(self):
        """Export the barcode set as JSON via GET and compare the response
        body verbatim against the expected serialization."""
        # Simulate the GET
        with self.login(self.user):
            response = self.client.get(
                reverse('barcodeset_export',
                        kwargs={'uuid': self.barcode_set.uuid}))
        # Check response
        self.assertEqual(response.status_code, 200)
        # Compared byte for byte, so this must match the view's serializer
        # output exactly, including the JSON indentation.
        EXPECTED = textwrap.dedent(r"""
            {
              "name": "Agilent SureSelect XT Test",
              "short_name": "SureSelectTest",
              "description": null,
              "entries": [
                {
                  "name": "AR01",
                  "sequence": "CGATCGAT"
                },
                {
                  "name": "AR02",
                  "sequence": "ATTATAAA"
                }
              ]
            }
            """).lstrip()
        self.assertEqual(response.content.decode('utf-8'), EXPECTED)
class TestBarcodeSetImportView(SuperUserTestCase):
    """Test the JSON import view for barcode sets."""

    def setUp(self):
        self.user = self.make_user()
        self.client = Client()

    def test_render(self):
        """Upload a JSON barcode set description and check that the set
        and its two entries are created and the view redirects to the new
        record's detail page."""
        # Prepare payload to post.  The view parses it as JSON, so the
        # whitespace layout is not significant.
        payload = io.StringIO(textwrap.dedent(r"""
            {
              "name": "Agilent SureSelect XT Test",
              "short_name": "SureSelectTest",
              "description": null,
              "entries": [
                {
                  "name": "AR01",
                  "sequence": "CGATCGAT"
                },
                {
                  "name": "AR02",
                  "sequence": "ATTATAAA"
                }
              ]
            }
            """).lstrip())
        # Check precondition
        self.assertEqual(BarcodeSet.objects.all().count(), 0)
        self.assertEqual(BarcodeSetEntry.objects.all().count(), 0)
        # Simulate the POST
        with self.login(self.user):
            response = self.client.post(
                reverse('barcodeset_import'),
                {'json_file': payload})
        # Check response: redirect to the newest barcode set's detail view
        barcodeset = BarcodeSet.objects.order_by('-created')[0]
        with self.login(self.user):
            self.assertRedirects(
                response, reverse('barcodeset_view',
                                  kwargs={'uuid': barcodeset.uuid}))
        # Check database state afterwards
        self.assertEqual(BarcodeSet.objects.all().count(), 1)
        self.assertEqual(BarcodeSetEntry.objects.all().count(), 2)
class TestSearchView(
        SuperUserTestCase, FlowCellMixin, SequencingMachineMixin, LibraryMixin,
        BarcodeSetMixin, BarcodeSetEntryMixin):
    """Tests for the search view, querying by library name substrings."""

    def setUp(self):
        self.user = self.make_user()
        self.client = Client()
        # Sequencing machine
        self.machine = self._make_machine()
        # Barcode set with two entries
        self.barcode_set = self._make_barcode_set()
        self.barcode1 = self._make_barcode_set_entry(
            self.barcode_set, 'AR01', 'CGATCGAT')
        self.barcode2 = self._make_barcode_set_entry(
            self.barcode_set, 'AR02', 'ATTATATA')
        # Flow cell carrying the two libraries LIB_001 and LIB_002
        self.flow_cell = self._make_flow_cell(
            self.user, datetime.date(2016, 3, 3), self.machine, 815, 'A',
            'BCDEFGHIXX', 'LABEL', 8, models.FLOWCELL_STATUS_SEQ_COMPLETE,
            'John Doe', True, 1, models.RTA_VERSION_V2, 151, 'Description')
        self.library1 = self._make_library(
            self.flow_cell, 'LIB_001', models.REFERENCE_HUMAN,
            self.barcode_set, self.barcode1, [1, 2], None, None)
        self.library2 = self._make_library(
            self.flow_cell, 'LIB_002', models.REFERENCE_HUMAN,
            self.barcode_set, self.barcode2, [1, 2], None, None)

    def _search(self, term):
        """Run a logged-in GET against the search view for ``term``."""
        with self.login(self.user):
            return self.client.get(reverse('search'), {'q': term})

    def test_with_result_of_two(self):
        """A query matching both libraries yields two results."""
        resp = self._search('LIB_00')
        self.assertEqual(len(resp.context['results']), 2)

    def test_with_result_of_one(self):
        """A query matching a single library yields exactly that library."""
        resp = self._search('001')
        self.assertEqual(len(resp.context['results']), 1)
        self.assertEqual(resp.context['results'][0].name, 'LIB_001')

    def test_without_result(self):
        """A query matching nothing yields an empty result list."""
        resp = self._search('003')
        self.assertEqual(len(resp.context['results']), 0)
# Message Related --------------------------------------------------------
class TestMessageCreateView(
        SuperUserTestCase, FlowCellMixin, SequencingMachineMixin, LibraryMixin,
        BarcodeSetMixin, BarcodeSetEntryMixin):
    """Tests for attaching messages (with file attachments) to a flow cell.

    Fixes: the post-condition block asserted the ``AttachmentFile`` count
    twice and never the ``Attachment`` count (despite checking all three
    models in the preconditions); the deprecated ``assertEquals`` alias is
    replaced with ``assertEqual``.
    """

    def setUp(self):
        self.user = self.make_user()
        self.client = Client()
        # Create Machine
        self.machine = self._make_machine()
        # Create Barcode set
        self.barcode_set = self._make_barcode_set()
        self.barcode1 = self._make_barcode_set_entry(
            self.barcode_set, 'AR01', 'CGATCGAT')
        self.barcode2 = self._make_barcode_set_entry(
            self.barcode_set, 'AR02', 'ATTATATA')
        # Create Flow cell
        self.flow_cell = self._make_flow_cell(
            self.user, datetime.date(2016, 3, 3), self.machine, 815, 'A',
            'BCDEFGHIXX', 'LABEL', 8, models.FLOWCELL_STATUS_SEQ_COMPLETE,
            'John Doe', True, 1, models.RTA_VERSION_V2, 151, 'Description')
        self.library1 = self._make_library(
            self.flow_cell, 'LIB_001', models.REFERENCE_HUMAN,
            self.barcode_set, self.barcode1, [1, 2], None, None)
        self.library2 = self._make_library(
            self.flow_cell, 'LIB_002', models.REFERENCE_HUMAN,
            self.barcode_set, self.barcode2, [1, 2], None, None)

    def test_get(self):
        """The message creation form renders for the related flow cell."""
        with self.login(self.user):
            response = self.client.get(reverse(
                'flowcell_add_message',
                kwargs={'related_uuid': self.flow_cell.uuid}))
            self.assertEqual(response.status_code, 200)

    def test_post(self):
        """POSTing a message with one attachment stores the message, the
        attachment record, and the attachment file content."""
        payload = io.StringIO(textwrap.dedent(r"""
            Example File Content
            """).lstrip())
        # Simulate POST request
        values = {
            'title': 'Message Title',
            'body': 'Message Body',
            'attachments': [payload],
        }
        # Preconditions: nothing stored yet.
        self.assertEqual(
            threads_models.Message.objects.all().count(), 0)
        self.assertEqual(
            threads_models.Attachment.objects.all().count(), 0)
        self.assertEqual(
            threads_models.AttachmentFile.objects.all().count(), 0)
        # Simulate the POST
        with self.login(self.user):
            response = self.client.post(
                reverse('flowcell_add_message',
                        kwargs={'related_uuid': self.flow_cell.uuid}),
                values)
            self.assertRedirects(
                response, reverse(
                    'flowcell_view', kwargs={'uuid': self.flow_cell.uuid}))
        # One message with one attachment and one attachment file.
        self.assertEqual(
            threads_models.Message.objects.all().count(), 1)
        # BUG FIX: this previously re-asserted the AttachmentFile count; the
        # Attachment count was never checked.
        self.assertEqual(
            threads_models.Attachment.objects.all().count(), 1)
        self.assertEqual(
            threads_models.AttachmentFile.objects.all().count(), 1)
        EXPECTED = {
            'object_id': self.flow_cell.pk,
            'title': values['title'],
            'body': values['body'],
            'author': self.user,
            'mime_type': 'text/plain',
        }
        msg = threads_models.Message.objects.all()[0]
        for key, value in EXPECTED.items():
            self.assertEqual(getattr(msg, key), value)
        att = threads_models.Attachment.objects.all()[0]
        self.assertEqual(att.message_id, msg.pk)
        att_file = threads_models.AttachmentFile.objects.all()[0]
        # base64 of "Example File Content\n"
        self.assertEqual(att_file.bytes, 'RXhhbXBsZSBGaWxlIENvbnRlbnQK')
class MessageMixin:
    """Mixin providing a factory for ``Message`` objects on a flow cell."""

    @classmethod
    def _make_message(cls, user, flow_cell, title, body):
        """Create and return a persisted message authored by ``user`` and
        attached (via generic relation) to ``flow_cell``."""
        content_type = ContentType.objects.get_for_model(flow_cell)
        return threads_models.Message.objects.create(
            author=user,
            content_type=content_type,
            object_id=flow_cell.pk,
            title=title,
            body=body)
class TestMessageDeleteView(
        SuperUserTestCase, FlowCellMixin, SequencingMachineMixin, LibraryMixin,
        BarcodeSetMixin, BarcodeSetEntryMixin, MessageMixin):
    """Tests for deleting a message attached to a flow cell.

    Fix: the deprecated ``assertEquals`` alias is replaced with
    ``assertEqual``.
    """

    def setUp(self):
        self.user = self.make_user()
        self.client = Client()
        # Create Machine
        self.machine = self._make_machine()
        # Create Barcode set
        self.barcode_set = self._make_barcode_set()
        self.barcode1 = self._make_barcode_set_entry(
            self.barcode_set, 'AR01', 'CGATCGAT')
        self.barcode2 = self._make_barcode_set_entry(
            self.barcode_set, 'AR02', 'ATTATATA')
        # Create Flow cell
        self.flow_cell = self._make_flow_cell(
            self.user, datetime.date(2016, 3, 3), self.machine, 815, 'A',
            'BCDEFGHIXX', 'LABEL', 8, models.FLOWCELL_STATUS_SEQ_COMPLETE,
            'John Doe', True, 1, models.RTA_VERSION_V2, 151, 'Description')
        self.library1 = self._make_library(
            self.flow_cell, 'LIB_001', models.REFERENCE_HUMAN,
            self.barcode_set, self.barcode1, [1, 2], None, None)
        self.library2 = self._make_library(
            self.flow_cell, 'LIB_002', models.REFERENCE_HUMAN,
            self.barcode_set, self.barcode2, [1, 2], None, None)
        # Create Message
        self.message = self._make_message(
            self.user, self.flow_cell, 'Some Title', 'Some Body')

    def test_get(self):
        """The deletion confirmation page renders."""
        with self.login(self.user):
            response = self.client.get(reverse(
                'flowcell_delete_message',
                kwargs={'uuid': self.message.uuid}))
            self.assertEqual(response.status_code, 200)

    def test_post(self):
        """POST removes the message and redirects to the flow cell view."""
        self.assertEqual(threads_models.Message.objects.all().count(), 1)
        with self.login(self.user):
            response = self.client.post(reverse(
                'flowcell_delete_message',
                kwargs={'uuid': self.message.uuid}))
            self.assertRedirects(
                response,
                reverse('flowcell_view', kwargs={'uuid': self.flow_cell.uuid}))
        self.assertEqual(threads_models.Message.objects.all().count(), 0)
class TestMessageUpdateView(
        SuperUserTestCase, FlowCellMixin, SequencingMachineMixin, LibraryMixin,
        BarcodeSetMixin, BarcodeSetEntryMixin, MessageMixin):
    """Tests for updating a message attached to a flow cell.

    Fix: the deprecated ``assertEquals`` alias is replaced with
    ``assertEqual``.
    """

    def setUp(self):
        self.user = self.make_user()
        self.client = Client()
        # Create Machine
        self.machine = self._make_machine()
        # Create Barcode set
        self.barcode_set = self._make_barcode_set()
        self.barcode1 = self._make_barcode_set_entry(
            self.barcode_set, 'AR01', 'CGATCGAT')
        self.barcode2 = self._make_barcode_set_entry(
            self.barcode_set, 'AR02', 'ATTATATA')
        # Create Flow cell
        self.flow_cell = self._make_flow_cell(
            self.user, datetime.date(2016, 3, 3), self.machine, 815, 'A',
            'BCDEFGHIXX', 'LABEL', 8, models.FLOWCELL_STATUS_SEQ_COMPLETE,
            'John Doe', True, 1, models.RTA_VERSION_V2, 151, 'Description')
        self.library1 = self._make_library(
            self.flow_cell, 'LIB_001', models.REFERENCE_HUMAN,
            self.barcode_set, self.barcode1, [1, 2], None, None)
        self.library2 = self._make_library(
            self.flow_cell, 'LIB_002', models.REFERENCE_HUMAN,
            self.barcode_set, self.barcode2, [1, 2], None, None)
        # Create Message
        self.message = self._make_message(
            self.user, self.flow_cell, 'Some Title', 'Some Body')

    def test_get(self):
        """The update form renders with the message as context object."""
        with self.login(self.user):
            response = self.client.get(reverse(
                'flowcell_update_message',
                kwargs={'uuid': self.message.uuid}))
            self.assertEqual(response.context['object'], self.message)
            self.assertEqual(response.status_code, 200)

    def test_post(self):
        """POST updates title and body and redirects to the flow cell."""
        self.assertEqual(threads_models.Message.objects.all().count(), 1)
        values = {
            'title': 'Updated Title',
            'body': 'Updated Body',
        }
        with self.login(self.user):
            response = self.client.post(reverse(
                'flowcell_update_message',
                kwargs={'uuid': self.message.uuid}),
                values)
            self.assertRedirects(
                response,
                reverse('flowcell_view', kwargs={'uuid': self.flow_cell.uuid}))
        # Still exactly one message, now carrying the updated fields.
        self.assertEqual(threads_models.Message.objects.all().count(), 1)
        message = threads_models.Message.objects.all()[0]
        self.assertEqual(message.title, 'Updated Title')
        self.assertEqual(message.body, 'Updated Body')
| 37.850545 | 87 | 0.586368 | 6,153 | 59,009 | 5.464001 | 0.059646 | 0.047591 | 0.032064 | 0.028316 | 0.872606 | 0.853034 | 0.826978 | 0.813177 | 0.794914 | 0.766121 | 0 | 0.021468 | 0.289549 | 59,009 | 1,558 | 88 | 37.87484 | 0.780479 | 0.078598 | 0 | 0.735523 | 0 | 0 | 0.164206 | 0.01138 | 0 | 0 | 0 | 0 | 0.152118 | 1 | 0.063094 | false | 0.000864 | 0.014693 | 0 | 0.104581 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
3422d5899b3fce548f02d77f127c97d8d0087a41 | 105 | py | Python | src/ethindex/util.py | dakingha69/py-eth-index | dadf0b2a84b51083f9d50546bd0924d6a286a8d2 | [
"MIT"
] | null | null | null | src/ethindex/util.py | dakingha69/py-eth-index | dadf0b2a84b51083f9d50546bd0924d6a286a8d2 | [
"MIT"
] | null | null | null | src/ethindex/util.py | dakingha69/py-eth-index | dadf0b2a84b51083f9d50546bd0924d6a286a8d2 | [
"MIT"
] | null | null | null | import pkg_resources
def get_version():
    """Return the installed version string of the ``eth-index`` distribution."""
    distribution = pkg_resources.get_distribution("eth-index")
    return distribution.version
| 17.5 | 62 | 0.790476 | 14 | 105 | 5.642857 | 0.714286 | 0.303797 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.114286 | 105 | 5 | 63 | 21 | 0.849462 | 0 | 0 | 0 | 0 | 0 | 0.085714 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 8 |
347489936401391e8d05f956a0fa851901ffb3fa | 8,756 | py | Python | keras_question_and_answering_system/library/utility/qa_embed_data_utils.py | iamatulsingh/QnA-web-api | 4db0388248ce5714eaa6bc518ff0b40bb5eb9cf8 | [
"MIT"
] | 25 | 2017-12-29T03:55:43.000Z | 2022-03-22T03:55:04.000Z | keras_question_and_answering_system/library/utility/qa_embed_data_utils.py | iamatulsingh/QnA-web-api | 4db0388248ce5714eaa6bc518ff0b40bb5eb9cf8 | [
"MIT"
] | 3 | 2018-02-15T19:19:56.000Z | 2019-12-24T09:11:47.000Z | keras_question_and_answering_system/library/utility/qa_embed_data_utils.py | iamatulsingh/QnA-web-api | 4db0388248ce5714eaa6bc518ff0b40bb5eb9cf8 | [
"MIT"
] | 16 | 2018-01-11T15:09:47.000Z | 2022-01-09T08:44:40.000Z | from collections import Counter
import numpy as np
from sklearn.model_selection import train_test_split
from keras_question_and_answering_system.library.utility.text_utils import in_white_list
import nltk
class SQuADSeq2SeqEmbTripleSamples(object):
input_paragraph_max_seq_length = None
input_question_max_seq_length = None
target_max_seq_length = None
word2emb = None
target_word2idx = None
target_idx2word = None
num_target_tokens = None
samples = None
dataset = None
def __init__(self, dataset, word2emb, embed_size, max_target_vocab_size=None):
if max_target_vocab_size is None:
max_target_vocab_size = 5000
self.dataset = dataset
self.word2emb = word2emb
self.input_data_samples = []
self.output_data_samples = []
self.input_paragraph_max_seq_length = 0
self.input_question_max_seq_length = 0
self.target_max_seq_length = 0
unknown_emb = np.zeros(shape=embed_size)
target_counter = Counter()
input_data_samples = []
output_data_samples = []
for sample in self.dataset.data:
paragraph, question, answer = sample
paragraph_word_list = [w.lower() for w in nltk.word_tokenize(paragraph) if in_white_list(w)]
question_word_list = [w.lower() for w in nltk.word_tokenize(question) if in_white_list(w)]
answer_word_list = [w.lower() for w in nltk.word_tokenize(answer) if in_white_list(w)]
output_data = ['START'] + answer_word_list + ['END']
input_data_samples.append([paragraph_word_list, question_word_list])
output_data_samples.append(output_data)
for w in output_data:
target_counter[w] += 1
self.input_paragraph_max_seq_length = max(self.input_paragraph_max_seq_length, len(paragraph_word_list))
self.input_question_max_seq_length = max(self.input_question_max_seq_length, len(question_word_list))
self.target_max_seq_length = max(self.target_max_seq_length, len(output_data))
self.target_word2idx = dict()
for idx, word in enumerate(target_counter.most_common(max_target_vocab_size)):
self.target_word2idx[word[0]] = idx + 1
self.target_word2idx['UNK'] = 0
self.target_idx2word = dict([(idx, word) for word, idx in self.target_word2idx.items()])
self.num_target_tokens = len(self.target_idx2word)
input_encoded_data_samples = []
target_encoded_data_samples = []
for input_data, output_data in zip(input_data_samples, output_data_samples):
input_paragraph_encoded_data = []
input_question_encoded_data = []
target_encoded_data = []
input_paragraph_data, input_question_data = input_data
for word in input_question_data:
if word in self.word2emb:
input_question_encoded_data.append(self.word2emb[word])
else:
input_question_encoded_data.append(unknown_emb)
for word in input_paragraph_data:
if word in self.word2emb:
input_paragraph_encoded_data.append(self.word2emb[word])
else:
input_paragraph_encoded_data.append(unknown_emb)
for word in output_data:
if word in self.target_word2idx:
target_encoded_data.append(self.target_word2idx[word])
else:
target_encoded_data.append(0)
input_encoded_data_samples.append([input_paragraph_encoded_data, input_question_encoded_data])
target_encoded_data_samples.append(target_encoded_data)
self.samples = [input_encoded_data_samples, target_encoded_data_samples]
def save(self, dir_path, embed_type):
np.save(dir_path + '/seq2seq-' + embed_type + '-target-word2idx.npy', self.target_word2idx)
np.save(dir_path + '/seq2seq-' + embed_type + '-target-idx2word.npy', self.target_idx2word)
config = dict()
config['num_target_tokens'] = self.num_target_tokens
config['input_question_max_seq_length'] = self.input_question_max_seq_length
config['input_paragraph_max_seq_length'] = self.input_paragraph_max_seq_length
config['target_max_seq_length'] = self.target_max_seq_length
print(config)
np.save(dir_path + '/seq2seq-' + embed_type + '-config.npy', config)
def size(self):
return self.dataset.size()
def get_samples(self):
return self.samples
def split(self, test_size, random_state):
input_data, target_data = self.samples
return train_test_split(input_data, target_data, test_size=test_size,
random_state=random_state)
class SQuADSeq2SeqEmbTupleSamples(object):
    """Encode SQuAD-style (paragraph, question, answer) text triples for a
    seq2seq model that consumes paragraph and question as a SINGLE embedded
    input sequence (joined by a literal 'question' separator token) and
    predicts the answer as a sequence of vocabulary indices.
    """

    # Longest observed input/target sequences (set in __init__).
    input_max_seq_length = None
    target_max_seq_length = None
    # Mapping word -> embedding vector, supplied by the caller.
    word2emb = None
    # Answer-side vocabulary, both directions; index 0 is reserved for 'UNK'.
    target_word2idx = None
    target_idx2word = None
    num_target_tokens = None
    # [input_encoded_data_samples, target_encoded_data_samples]
    samples = None
    data_set = None

    def __init__(self, data_set, word2emb, embed_size, max_target_vocab_size=None):
        """Tokenize and encode every sample of ``data_set``.

        :param data_set: object exposing ``data`` (iterable of
            (paragraph, question, answer) text triples) and ``size()``
        :param word2emb: mapping from (lower-cased) word to embedding vector
        :param embed_size: dimensionality of the embedding vectors; used for
            the all-zero stand-in vector for out-of-vocabulary words
        :param max_target_vocab_size: cap for the answer-side vocabulary
            (defaults to 5000); rarer words are encoded as 0 ('UNK')
        """
        if max_target_vocab_size is None:
            max_target_vocab_size = 5000
        self.data_set = data_set
        self.word2emb = word2emb
        # NOTE(review): these two instance attributes are never filled
        # below -- local lists of the same names are used instead; they
        # appear to be dead state kept for compatibility.
        self.input_data_samples = []
        self.output_data_samples = []
        self.input_max_seq_length = 0
        self.target_max_seq_length = 0
        # Zero vector substituted for words missing from word2emb.
        unknown_emb = np.zeros(shape=embed_size)
        target_counter = Counter()
        input_data_samples = []
        output_data_samples = []
        # Pass 1: tokenize, count target words, track maximum lengths.
        for sample in self.data_set.data:
            paragraph, question, answer = sample
            # Lower-cased tokens, filtered through in_white_list().
            paragraph_word_list = [w.lower() for w in nltk.word_tokenize(paragraph) if in_white_list(w)]
            question_word_list = [w.lower() for w in nltk.word_tokenize(question) if in_white_list(w)]
            answer_word_list = [w.lower() for w in nltk.word_tokenize(answer) if in_white_list(w)]
            # Paragraph and question are concatenated into one input
            # sequence, separated by the literal token 'question'.
            input_data = paragraph_word_list + ['question'] + question_word_list
            # Decoder target is the answer framed by START/END markers.
            output_data = ['START'] + answer_word_list + ['END']
            input_data_samples.append(input_data)
            output_data_samples.append(output_data)
            for w in output_data:
                target_counter[w] += 1
            # Track the longest sequences seen so callers can pad to them.
            self.input_max_seq_length = max(self.input_max_seq_length, len(input_data))
            self.target_max_seq_length = max(self.target_max_seq_length, len(output_data))
        # Build the target vocabulary from the most frequent answer words;
        # indices start at 1 and index 0 is 'UNK'.
        self.target_word2idx = dict()
        for idx, word in enumerate(target_counter.most_common(max_target_vocab_size)):
            self.target_word2idx[word[0]] = idx + 1
        self.target_word2idx['UNK'] = 0
        self.target_idx2word = dict([(idx, word) for word, idx in self.target_word2idx.items()])
        self.num_target_tokens = len(self.target_idx2word)
        # Pass 2: map input words to embedding vectors and target words to
        # vocabulary indices.
        input_encoded_data_samples = []
        target_encoded_data_samples = []
        for input_data, output_data in zip(input_data_samples, output_data_samples):
            input_encoded_data = []
            target_encoded_data = []
            for word in input_data:
                if word in self.word2emb:
                    input_encoded_data.append(self.word2emb[word])
                else:
                    input_encoded_data.append(unknown_emb)
            for word in output_data:
                if word in self.target_word2idx:
                    target_encoded_data.append(self.target_word2idx[word])
                else:
                    target_encoded_data.append(0)
            input_encoded_data_samples.append(input_encoded_data)
            target_encoded_data_samples.append(target_encoded_data)
        self.samples = [input_encoded_data_samples, target_encoded_data_samples]

    def save(self, dir_path, embed_type):
        """Persist vocabulary mappings and model configuration as .npy files
        under ``dir_path``; ``embed_type`` tags the file names."""
        np.save(dir_path + '/seq2seq-' + embed_type + '-target-word2idx.npy', self.target_word2idx)
        np.save(dir_path + '/seq2seq-' + embed_type + '-target-idx2word.npy', self.target_idx2word)
        config = dict()
        config['num_target_tokens'] = self.num_target_tokens
        config['input_max_seq_length'] = self.input_max_seq_length
        config['target_max_seq_length'] = self.target_max_seq_length
        print(config)
        np.save(dir_path + '/seq2seq-' + embed_type + '-config.npy', config)

    def size(self):
        """Return the number of samples in the underlying dataset."""
        return self.data_set.size()

    def get_samples(self):
        """Return ``[encoded_inputs, encoded_targets]``."""
        return self.samples

    def split(self, test_size, random_state):
        """Split the encoded samples into train/test sets with sklearn's
        ``train_test_split`` and return its 4-tuple."""
        input_data, target_data = self.samples
        return train_test_split(input_data, target_data, test_size=test_size,
                                random_state=random_state)
| 38.069565 | 116 | 0.662175 | 1,114 | 8,756 | 4.81149 | 0.085278 | 0.065672 | 0.067164 | 0.040299 | 0.897015 | 0.85653 | 0.809515 | 0.793284 | 0.76306 | 0.76306 | 0 | 0.01122 | 0.256967 | 8,756 | 229 | 117 | 38.235808 | 0.812634 | 0 | 0 | 0.718563 | 0 | 0 | 0.038945 | 0.011535 | 0 | 0 | 0 | 0 | 0 | 1 | 0.05988 | false | 0 | 0.02994 | 0.023952 | 0.239521 | 0.011976 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
3475adac1fd20e64979f0cdfa554c2406d3bf629 | 105 | py | Python | dependes/__init__.py | yinzhiqing/ssh-manager | e5b1268f0f907ed82613cf3bbf3b103c300ab666 | [
"MIT"
] | null | null | null | dependes/__init__.py | yinzhiqing/ssh-manager | e5b1268f0f907ed82613cf3bbf3b103c300ab666 | [
"MIT"
] | null | null | null | dependes/__init__.py | yinzhiqing/ssh-manager | e5b1268f0f907ed82613cf3bbf3b103c300ab666 | [
"MIT"
] | null | null | null |
from .tomlkit.tomlkit.toml_document import TOMLDocument
from .tomlkit.tomlkit.toml_file import TOMLFile
| 26.25 | 55 | 0.857143 | 14 | 105 | 6.285714 | 0.571429 | 0.25 | 0.409091 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.085714 | 105 | 3 | 56 | 35 | 0.916667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
1b1210994228655884d1060fea251cf0f76c249b | 137 | py | Python | fastprocesspool/__init__.py | brmmm3/fastprocesspool | 76f8c484cbb34b8bf80a73f54543b98b224609cc | [
"MIT"
] | 1 | 2018-07-02T07:51:02.000Z | 2018-07-02T07:51:02.000Z | fastprocesspool/__init__.py | brmmm3/fastprocesspool | 76f8c484cbb34b8bf80a73f54543b98b224609cc | [
"MIT"
] | null | null | null | fastprocesspool/__init__.py | brmmm3/fastprocesspool | 76f8c484cbb34b8bf80a73f54543b98b224609cc | [
"MIT"
] | null | null | null |
import sys

# Pool lives one package level deeper on Python 3
# (fastprocesspool.fastprocesspool); the flat import is the Python 2 layout.
if sys.version_info[0] > 2:
    from fastprocesspool.fastprocesspool import Pool
else:
    from fastprocesspool import Pool
| 17.125 | 52 | 0.766423 | 18 | 137 | 5.777778 | 0.611111 | 0.365385 | 0.480769 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.017857 | 0.182482 | 137 | 7 | 53 | 19.571429 | 0.910714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.6 | 0 | 0.6 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
1b7c9f875d5019dfa996a34ecb0697bbb45f722b | 1,007 | py | Python | wrappers/serial/image/operations.py | cnwangfeng/algorithm-reference-library | 9605eb01652fbfcb9ff003cc12b44c84093b7fb1 | [
"Apache-2.0"
] | 22 | 2016-12-14T11:20:07.000Z | 2021-08-13T15:23:41.000Z | wrappers/serial/image/operations.py | cnwangfeng/algorithm-reference-library | 9605eb01652fbfcb9ff003cc12b44c84093b7fb1 | [
"Apache-2.0"
] | 30 | 2017-06-27T09:15:38.000Z | 2020-09-11T18:16:37.000Z | wrappers/serial/image/operations.py | cnwangfeng/algorithm-reference-library | 9605eb01652fbfcb9ff003cc12b44c84093b7fb1 | [
"Apache-2.0"
] | 20 | 2017-07-02T03:45:49.000Z | 2019-12-11T17:19:01.000Z | """ Image operations visible to the Execution Framework as Components
"""
from processing_components.image.operations import export_image_to_fits
from processing_components.image.operations import import_image_from_fits
from processing_components.image.operations import reproject_image
from processing_components.image.operations import add_image
from processing_components.image.operations import qa_image
from processing_components.image.operations import show_image
from processing_components.image.operations import show_components
from processing_components.image.operations import smooth_image
from processing_components.image.operations import calculate_image_frequency_moments
from processing_components.image.operations import calculate_image_from_frequency_moments
from processing_components.image.operations import remove_continuum_image
from processing_components.image.operations import convert_stokes_to_polimage
from processing_components.image.operations import convert_polimage_to_stokes | 59.235294 | 89 | 0.904667 | 127 | 1,007 | 6.858268 | 0.212598 | 0.241102 | 0.358209 | 0.432836 | 0.832377 | 0.832377 | 0.832377 | 0.347876 | 0 | 0 | 0 | 0 | 0.063555 | 1,007 | 17 | 90 | 59.235294 | 0.923648 | 0.064548 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 9 |
945e30dfcb34f40007230b8b6c23ef6b668c73bf | 197 | py | Python | python/src/pyddup/core/__init__.py | mfleischhacker/deduplication | ed98fabf2e4aad17800c1207659494e3f4229185 | [
"MIT"
] | null | null | null | python/src/pyddup/core/__init__.py | mfleischhacker/deduplication | ed98fabf2e4aad17800c1207659494e3f4229185 | [
"MIT"
] | null | null | null | python/src/pyddup/core/__init__.py | mfleischhacker/deduplication | ed98fabf2e4aad17800c1207659494e3f4229185 | [
"MIT"
] | null | null | null | from sys import version_info
if version_info.major == 2:
from pyddup.core.abstracts import *
from pyddup.core.settings import *
# from tools import *
from pyddup.core.util import * | 28.142857 | 39 | 0.715736 | 28 | 197 | 4.964286 | 0.5 | 0.215827 | 0.302158 | 0.28777 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00641 | 0.208122 | 197 | 7 | 40 | 28.142857 | 0.884615 | 0.096447 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.8 | 0 | 0.8 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
948306b84fa0dca8c6ef03ac78b76798e5054519 | 5,620 | py | Python | test/grab_upload_file.py | mawentao007/reading_grab | a8b64d235d60e5c895e70f59739888f6748d4407 | [
"MIT"
] | null | null | null | test/grab_upload_file.py | mawentao007/reading_grab | a8b64d235d60e5c895e70f59739888f6748d4407 | [
"MIT"
] | null | null | null | test/grab_upload_file.py | mawentao007/reading_grab | a8b64d235d60e5c895e70f59739888f6748d4407 | [
"MIT"
] | null | null | null | # coding: utf-8
from grab import UploadContent, UploadFile
import tempfile
import os
import pycurl
from test.util import build_grab
from test.util import BaseGrabTestCase
class TestUploadContent(BaseGrabTestCase):
    """Tests for submitting files via UploadContent and UploadFile objects."""

    def setUp(self):
        self.server.reset()

    def prepare_form_grab(self):
        """Return a Grab instance loaded with a one-field multipart form."""
        url = self.server.get_url()
        html = ('''<form action="%s" method="post" enctype="multipart/form-data">
<input type="file" name="image">
</form>''' % url).encode('ascii')
        return build_grab(html, charset='utf-8')

    def _submit_image(self, upload_data, upload_class):
        """Submit *upload_data* through the form's image input and return the
        file description recorded by the test server."""
        g = self.prepare_form_grab()
        g.doc.set_input('image', upload_data)
        # Dry-run submit first, to inspect the prepared multipart post.
        g.doc.submit(make_request=False)
        post = dict(g.config['multipart_post'])
        self.assertTrue(isinstance(post['image'], upload_class))
        # Then the real request.
        g.doc.submit()
        return self.server.request['files']['image'][0]

    def _create_temp_file(self, data):
        """Write *data* to a fresh temporary file and return its path."""
        fh, file_path = tempfile.mkstemp()
        # mkstemp's file descriptor was previously leaked; close it before
        # re-opening the path for writing.
        os.close(fh)
        with open(file_path, 'wb') as out:
            out.write(data)
        return file_path

    # *******************
    # UploadContent Tests
    # *******************

    def test_upload_content_filename(self):
        data = b'foo'
        res = self._submit_image(UploadContent(data, filename='avatar.jpg'),
                                 UploadContent)
        self.assertEqual(data, res['body'])
        self.assertEqual('avatar.jpg', res['filename'])
        self.assertEqual('image/jpeg', res['content_type'])

    def test_upload_content_random_filename(self):
        data = b'foo'
        res = self._submit_image(UploadContent(data), UploadContent)
        self.assertEqual(data, res['body'])
        # Was assertTrue(10, len(...)), which always passes because the first
        # argument (10) is truthy and the length is treated as the message.
        # Auto-generated filenames are expected to be 10 characters long.
        self.assertEqual(10, len(res['filename']))
        self.assertEqual('application/octet-stream', res['content_type'])

    def test_upload_content_content_type(self):
        data = b'foo'
        res = self._submit_image(
            UploadContent(data, content_type='application/grab'),
            UploadContent)
        self.assertEqual(data, res['body'])
        # Same assertTrue(10, ...) fix as above.
        self.assertEqual(10, len(res['filename']))
        self.assertEqual('application/grab', res['content_type'])

    # ****************
    # UploadFile Tests
    # ****************

    def test_upload_file(self):
        data = b'foo'
        file_path = self._create_temp_file(data)
        res = self._submit_image(UploadFile(file_path), UploadFile)
        self.assertEqual(data, res['body'])
        self.assertEqual(os.path.split(file_path)[1], res['filename'])
        self.assertEqual('application/octet-stream', res['content_type'])

    def test_upload_file_custom_filename(self):
        data = b'foo'
        file_path = self._create_temp_file(data)
        res = self._submit_image(UploadFile(file_path, filename='avatar.jpg'),
                                 UploadFile)
        self.assertEqual(data, res['body'])
        self.assertEqual('avatar.jpg', res['filename'])
        self.assertEqual('image/jpeg', res['content_type'])

    def test_upload_file_custom_content_type(self):
        data = b'foo'
        file_path = self._create_temp_file(data)
        res = self._submit_image(
            UploadFile(file_path, filename='avatar.jpg',
                       content_type='application/grab'),
            UploadFile)
        self.assertEqual(data, res['body'])
        self.assertEqual('avatar.jpg', res['filename'])
        self.assertEqual('application/grab', res['content_type'])
| 37.466667 | 81 | 0.572242 | 627 | 5,620 | 4.990431 | 0.135566 | 0.063918 | 0.097795 | 0.126558 | 0.841483 | 0.841483 | 0.841483 | 0.838926 | 0.831895 | 0.831895 | 0 | 0.006078 | 0.268149 | 5,620 | 149 | 82 | 37.718121 | 0.75468 | 0.022064 | 0 | 0.758065 | 0 | 0 | 0.14887 | 0.014213 | 0 | 0 | 0 | 0 | 0.193548 | 1 | 0.064516 | false | 0 | 0.048387 | 0 | 0.129032 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ca4e4f4f4af0fdd99f7cc8c4c80e29c08008fbd4 | 187 | py | Python | src/oop/Calculator.py | tborzyszkowski/TestAutomationInPython | 843c71df796588e181466d9b9b549f03dd907a6e | [
"MIT"
] | 2 | 2020-10-08T09:44:12.000Z | 2021-10-08T08:32:19.000Z | src/oop/Calculator.py | tborzyszkowski/TestAutomationInPython | 843c71df796588e181466d9b9b549f03dd907a6e | [
"MIT"
] | null | null | null | src/oop/Calculator.py | tborzyszkowski/TestAutomationInPython | 843c71df796588e181466d9b9b549f03dd907a6e | [
"MIT"
] | 1 | 2020-10-19T14:08:00.000Z | 2020-10-19T14:08:00.000Z | class Calculator:
def __init__(self) -> None:
self.state = 0
def add(self, number):
self.state += number
def mult(self, number):
self.state *= number | 20.777778 | 31 | 0.572193 | 23 | 187 | 4.478261 | 0.478261 | 0.262136 | 0.271845 | 0.368932 | 0.485437 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007752 | 0.31016 | 187 | 9 | 32 | 20.777778 | 0.790698 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.428571 | false | 0 | 0 | 0 | 0.571429 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 7 |
ca4eff22acd7a90d57c850bc22dbd223169a4584 | 6,737 | py | Python | test/unit/network/gremlin/test_gremlin_path_pattern.py | Sam-Martin/graph-notebook | e7a8cefb93891ea19d6df4f17fe0acca3e223ae9 | [
"ISC",
"Apache-2.0",
"CC0-1.0"
] | 378 | 2020-11-02T23:44:37.000Z | 2022-03-31T17:07:16.000Z | test/unit/network/gremlin/test_gremlin_path_pattern.py | QPC-database/graph-notebook | ea162e47c2c2e5600417e6ad9ac34aa7ac462899 | [
"ISC",
"Apache-2.0",
"CC0-1.0"
] | 124 | 2020-11-07T14:35:28.000Z | 2022-03-29T21:07:09.000Z | test/unit/network/gremlin/test_gremlin_path_pattern.py | QPC-database/graph-notebook | ea162e47c2c2e5600417e6ad9ac34aa7ac462899 | [
"ISC",
"Apache-2.0",
"CC0-1.0"
] | 76 | 2020-11-04T03:52:08.000Z | 2022-03-31T17:17:06.000Z | """
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
SPDX-License-Identifier: Apache-2.0
"""
import unittest
from gremlin_python.structure.graph import Path
from graph_notebook.network.gremlin.GremlinNetwork import GremlinNetwork, PathPattern
class TestAddResultsPathPattern(unittest.TestCase):
    """Tests for GremlinNetwork.add_results_with_pattern path handling."""

    def _build_network(self, pattern, objects):
        """Return a GremlinNetwork populated from one Path over *objects*."""
        gn = GremlinNetwork()
        gn.add_results_with_pattern([Path([], objects)], pattern)
        return gn

    def _assert_single_edge(self, pattern, expected_edge):
        """Run a vertex-edge-vertex *pattern* over SEA/route/DFW and check
        that the graph has exactly two nodes joined by *expected_edge*."""
        gn = self._build_network(pattern, ['SEA', 'route', 'DFW'])
        self.assertEqual(2, len(gn.graph.nodes))
        self.assertEqual(1, len(gn.graph.edges))
        self.assertIsNotNone(gn.graph.edges[expected_edge])

    def test_add_all_V_pattern(self):
        gn = self._build_network([PathPattern.V, PathPattern.V, PathPattern.V],
                                 ['SEA', 'DFW', 'AUS'])
        self.assertEqual(3, len(gn.graph.nodes))
        self.assertEqual(2, len(gn.graph.edges))

    def test_add_v_and_inV_pattern(self):
        gn = self._build_network(
            [PathPattern.V, PathPattern.IN_V, PathPattern.V],
            ['SEA', 'DFW', 'AUS'])
        for tup in gn.graph.edges:
            # DFW must be the incoming vertex of both edges.
            self.assertEqual(tup[1], 'DFW')
        self.assertEqual(3, len(gn.graph.nodes))
        self.assertEqual(2, len(gn.graph.edges))

    def test_add_v_and_outV_pattern(self):
        gn = self._build_network(
            [PathPattern.V, PathPattern.OUT_V, PathPattern.V],
            ['SEA', 'DFW', 'AUS'])
        for tup in gn.graph.edges:
            # DFW must be the outgoing vertex of both edges (the original
            # comment said "incoming", a copy-paste slip).
            self.assertEqual(tup[0], 'DFW')
        self.assertEqual(3, len(gn.graph.nodes))
        self.assertEqual(2, len(gn.graph.edges))

    def test_add_v_outV_inV_pattern(self):
        gn = self._build_network(
            [PathPattern.V, PathPattern.OUT_V, PathPattern.IN_V],
            ['SEA', 'DFW', 'AUS'])
        self.assertEqual(3, len(gn.graph.nodes))
        self.assertEqual(2, len(gn.graph.edges))
        self.assertEqual(2, len(gn.graph.out_edges('DFW')))

    def test_add_v_inV_outV_pattern(self):
        gn = self._build_network(
            [PathPattern.V, PathPattern.IN_V, PathPattern.OUT_V],
            ['SEA', 'DFW', 'AUS'])
        self.assertEqual(3, len(gn.graph.nodes))
        self.assertEqual(2, len(gn.graph.edges))
        self.assertEqual(2, len(gn.graph.in_edges('DFW')))

    def test_add_v_inV_outV_longer_path(self):
        gn = self._build_network(
            [PathPattern.V, PathPattern.IN_V, PathPattern.OUT_V],
            ['SEA', 'DFW', 'AUS', 'LAX', 'JFK'])
        self.assertEqual(5, len(gn.graph.nodes))
        self.assertEqual(4, len(gn.graph.edges))
        self.assertEqual(2, len(gn.graph.in_edges('DFW')))
        self.assertEqual(1, len(gn.graph.in_edges('LAX')))
        self.assertEqual(1, len(gn.graph.in_edges('JFK')))

    def test_add_v_e_v_path(self):
        self._assert_single_edge(
            [PathPattern.V, PathPattern.E, PathPattern.V],
            ('SEA', 'DFW', 'route'))

    def test_add_v_inE_v_path(self):
        # NOTE(review): despite its name this test used PathPattern.E, making
        # it a duplicate of test_add_v_e_v_path; behavior is kept as-is.
        # IN_E is covered by test_add_V_inE_V_path below.
        self._assert_single_edge(
            [PathPattern.V, PathPattern.E, PathPattern.V],
            ('SEA', 'DFW', 'route'))

    def test_add_v_outE_path(self):
        self._assert_single_edge(
            [PathPattern.V, PathPattern.OUT_E, PathPattern.V],
            ('SEA', 'DFW', 'route'))

    def test_add_v_inE_path(self):
        self._assert_single_edge(
            [PathPattern.V, PathPattern.IN_E, PathPattern.V],
            ('DFW', 'SEA', 'route'))

    def test_add_inV_E_V_path(self):
        self._assert_single_edge(
            [PathPattern.IN_V, PathPattern.E, PathPattern.V],
            ('DFW', 'SEA', 'route'))

    def test_add_outV_E_V_path(self):
        self._assert_single_edge(
            [PathPattern.OUT_V, PathPattern.E, PathPattern.V],
            ('SEA', 'DFW', 'route'))

    def test_add_outV_E_inV_path(self):
        self._assert_single_edge(
            [PathPattern.OUT_V, PathPattern.E, PathPattern.IN_V],
            ('SEA', 'DFW', 'route'))

    def test_add_V_inE_V_path(self):
        self._assert_single_edge(
            [PathPattern.V, PathPattern.IN_E, PathPattern.V],
            ('DFW', 'SEA', 'route'))

    def test_add_V_outE_V_path(self):
        self._assert_single_edge(
            [PathPattern.V, PathPattern.OUT_E, PathPattern.V],
            ('SEA', 'DFW', 'route'))
| 39.629412 | 101 | 0.627876 | 874 | 6,737 | 4.671625 | 0.083524 | 0.078864 | 0.073475 | 0.079109 | 0.904237 | 0.891012 | 0.860642 | 0.856968 | 0.856968 | 0.800147 | 0 | 0.007378 | 0.215378 | 6,737 | 169 | 102 | 39.863905 | 0.76504 | 0.031616 | 0 | 0.704545 | 0 | 0 | 0.042824 | 0 | 0 | 0 | 0 | 0 | 0.348485 | 1 | 0.113636 | false | 0 | 0.022727 | 0 | 0.143939 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ca5d4f5ce74be58df5814ac80c3f17195337439a | 149 | py | Python | webserver/controllers/__init__.py | elamperti/bastardbot | efdc4058e99aaf5e74d34a304fed564473d6ca76 | [
"MIT"
] | 11 | 2015-01-27T22:37:52.000Z | 2022-02-09T23:41:50.000Z | webserver/controllers/__init__.py | elamperti/bastardbot | efdc4058e99aaf5e74d34a304fed564473d6ca76 | [
"MIT"
] | null | null | null | webserver/controllers/__init__.py | elamperti/bastardbot | efdc4058e99aaf5e74d34a304fed564473d6ca76 | [
"MIT"
] | 3 | 2015-01-23T16:32:03.000Z | 2022-02-09T23:41:51.000Z | from webserver.controllers import apicontroller
from webserver.controllers import bastardcontroller
from webserver.controllers import logincontroller | 49.666667 | 51 | 0.90604 | 15 | 149 | 9 | 0.466667 | 0.288889 | 0.533333 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.073826 | 149 | 3 | 52 | 49.666667 | 0.978261 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
ca70d275af76bc130e399f6fcd47c55782b55292 | 36,166 | py | Python | sdk/python/pulumi_oci/ocvp/esxi_host.py | EladGabay/pulumi-oci | 6841e27d4a1a7e15c672306b769912efbfd3ba99 | [
"ECL-2.0",
"Apache-2.0"
] | 5 | 2021-08-17T11:14:46.000Z | 2021-12-31T02:07:03.000Z | sdk/python/pulumi_oci/ocvp/esxi_host.py | pulumi-oci/pulumi-oci | 6841e27d4a1a7e15c672306b769912efbfd3ba99 | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2021-09-06T11:21:29.000Z | 2021-09-06T11:21:29.000Z | sdk/python/pulumi_oci/ocvp/esxi_host.py | pulumi-oci/pulumi-oci | 6841e27d4a1a7e15c672306b769912efbfd3ba99 | [
"ECL-2.0",
"Apache-2.0"
] | 2 | 2021-08-24T23:31:30.000Z | 2022-01-02T19:26:54.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['EsxiHostArgs', 'EsxiHost']
# NOTE(review): generated by the Pulumi Terraform Bridge (see file header);
# @pulumi.input_type appears to introspect __init__ and the @pulumi.getter
# properties, so this class's structure should not be hand-restructured.
@pulumi.input_type
class EsxiHostArgs:
    def __init__(__self__, *,
                 sddc_id: pulumi.Input[str],
                 current_sku: Optional[pulumi.Input[str]] = None,
                 defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 next_sku: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a EsxiHost resource.
        :param pulumi.Input[str] sddc_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the SDDC to add the ESXi host to.
        :param pulumi.Input[str] current_sku: Billing option selected during SDDC creation. Oracle Cloud Infrastructure VMware Solution supports the following billing interval SKUs: HOUR, MONTH, ONE_YEAR, and THREE_YEARS. [ListSupportedSkus](https://docs.cloud.oracle.com/iaas/api/#/en/vmware/20200501/SupportedSkuSummary/ListSupportedSkus).
        :param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
        :param pulumi.Input[str] display_name: (Updatable) A descriptive name for the ESXi host. It's changeable. Esxi Host name requirements are 1-16 character length limit, Must start with a letter, Must be English letters, numbers, - only, No repeating hyphens, Must be unique within the SDDC.
        :param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
        :param pulumi.Input[str] next_sku: (Updatable) Billing option to switch to once existing billing cycle ends. If nextSku is null or empty, currentSku will be used to continue with next billing term. [ListSupportedSkus](https://docs.cloud.oracle.com/iaas/api/#/en/vmware/20200501/SupportedSkuSummary/ListSupportedSkus).
        """
        pulumi.set(__self__, "sddc_id", sddc_id)
        # Optional fields are only recorded when explicitly provided, so that
        # unset inputs remain absent rather than becoming explicit Nones.
        if current_sku is not None:
            pulumi.set(__self__, "current_sku", current_sku)
        if defined_tags is not None:
            pulumi.set(__self__, "defined_tags", defined_tags)
        if display_name is not None:
            pulumi.set(__self__, "display_name", display_name)
        if freeform_tags is not None:
            pulumi.set(__self__, "freeform_tags", freeform_tags)
        if next_sku is not None:
            pulumi.set(__self__, "next_sku", next_sku)

    @property
    @pulumi.getter(name="sddcId")
    def sddc_id(self) -> pulumi.Input[str]:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the SDDC to add the ESXi host to.
        """
        return pulumi.get(self, "sddc_id")

    @sddc_id.setter
    def sddc_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "sddc_id", value)

    @property
    @pulumi.getter(name="currentSku")
    def current_sku(self) -> Optional[pulumi.Input[str]]:
        """
        Billing option selected during SDDC creation. Oracle Cloud Infrastructure VMware Solution supports the following billing interval SKUs: HOUR, MONTH, ONE_YEAR, and THREE_YEARS. [ListSupportedSkus](https://docs.cloud.oracle.com/iaas/api/#/en/vmware/20200501/SupportedSkuSummary/ListSupportedSkus).
        """
        return pulumi.get(self, "current_sku")

    @current_sku.setter
    def current_sku(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "current_sku", value)

    @property
    @pulumi.getter(name="definedTags")
    def defined_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
        """
        return pulumi.get(self, "defined_tags")

    @defined_tags.setter
    def defined_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "defined_tags", value)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) A descriptive name for the ESXi host. It's changeable. Esxi Host name requirements are 1-16 character length limit, Must start with a letter, Must be English letters, numbers, - only, No repeating hyphens, Must be unique within the SDDC.
        """
        return pulumi.get(self, "display_name")

    @display_name.setter
    def display_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "display_name", value)

    @property
    @pulumi.getter(name="freeformTags")
    def freeform_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
        """
        return pulumi.get(self, "freeform_tags")

    @freeform_tags.setter
    def freeform_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "freeform_tags", value)

    @property
    @pulumi.getter(name="nextSku")
    def next_sku(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) Billing option to switch to once existing billing cycle ends. If nextSku is null or empty, currentSku will be used to continue with next billing term. [ListSupportedSkus](https://docs.cloud.oracle.com/iaas/api/#/en/vmware/20200501/SupportedSkuSummary/ListSupportedSkus).
        """
        return pulumi.get(self, "next_sku")

    @next_sku.setter
    def next_sku(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "next_sku", value)
@pulumi.input_type
class _EsxiHostState:
def __init__(__self__, *,
billing_contract_end_date: Optional[pulumi.Input[str]] = None,
compartment_id: Optional[pulumi.Input[str]] = None,
compute_instance_id: Optional[pulumi.Input[str]] = None,
current_sku: Optional[pulumi.Input[str]] = None,
defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
display_name: Optional[pulumi.Input[str]] = None,
freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
next_sku: Optional[pulumi.Input[str]] = None,
sddc_id: Optional[pulumi.Input[str]] = None,
state: Optional[pulumi.Input[str]] = None,
time_created: Optional[pulumi.Input[str]] = None,
time_updated: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering EsxiHost resources.
:param pulumi.Input[str] billing_contract_end_date: Current billing cycle end date. If nextSku is different from existing SKU, then we switch to newSKu after this contractEndDate Example: `2016-08-25T21:10:29.600Z`
:param pulumi.Input[str] compartment_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment that contains the SDDC.
:param pulumi.Input[str] compute_instance_id: In terms of implementation, an ESXi host is a Compute instance that is configured with the chosen bundle of VMware software. The `computeInstanceId` is the [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of that Compute instance.
:param pulumi.Input[str] current_sku: Billing option selected during SDDC creation. Oracle Cloud Infrastructure VMware Solution supports the following billing interval SKUs: HOUR, MONTH, ONE_YEAR, and THREE_YEARS. [ListSupportedSkus](https://docs.cloud.oracle.com/iaas/api/#/en/vmware/20200501/SupportedSkuSummary/ListSupportedSkus).
:param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
:param pulumi.Input[str] display_name: (Updatable) A descriptive name for the ESXi host. It's changeable. Esxi Host name requirements are 1-16 character length limit, Must start with a letter, Must be English letters, numbers, - only, No repeating hyphens, Must be unique within the SDDC.
:param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
:param pulumi.Input[str] next_sku: (Updatable) Billing option to switch to once existing billing cycle ends. If nextSku is null or empty, currentSku will be used to continue with next billing term. [ListSupportedSkus](https://docs.cloud.oracle.com/iaas/api/#/en/vmware/20200501/SupportedSkuSummary/ListSupportedSkus).
:param pulumi.Input[str] sddc_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the SDDC to add the ESXi host to.
:param pulumi.Input[str] state: The current state of the ESXi host.
:param pulumi.Input[str] time_created: The date and time the ESXi host was created, in the format defined by [RFC3339](https://tools.ietf.org/html/rfc3339). Example: `2016-08-25T21:10:29.600Z`
:param pulumi.Input[str] time_updated: The date and time the ESXi host was updated, in the format defined by [RFC3339](https://tools.ietf.org/html/rfc3339).
"""
if billing_contract_end_date is not None:
pulumi.set(__self__, "billing_contract_end_date", billing_contract_end_date)
if compartment_id is not None:
pulumi.set(__self__, "compartment_id", compartment_id)
if compute_instance_id is not None:
pulumi.set(__self__, "compute_instance_id", compute_instance_id)
if current_sku is not None:
pulumi.set(__self__, "current_sku", current_sku)
if defined_tags is not None:
pulumi.set(__self__, "defined_tags", defined_tags)
if display_name is not None:
pulumi.set(__self__, "display_name", display_name)
if freeform_tags is not None:
pulumi.set(__self__, "freeform_tags", freeform_tags)
if next_sku is not None:
pulumi.set(__self__, "next_sku", next_sku)
if sddc_id is not None:
pulumi.set(__self__, "sddc_id", sddc_id)
if state is not None:
pulumi.set(__self__, "state", state)
if time_created is not None:
pulumi.set(__self__, "time_created", time_created)
if time_updated is not None:
pulumi.set(__self__, "time_updated", time_updated)
# --- Typed property accessors over the Pulumi input/state property bag ---
# NOTE(review): each getter reads one field via pulumi.get() and each paired
# setter writes it via pulumi.set(). The layout matches standard Pulumi Python
# codegen output; presumably this is the _EsxiHostState class referenced by
# EsxiHost.get() below — confirm against the class header above this chunk.
@property
@pulumi.getter(name="billingContractEndDate")
def billing_contract_end_date(self) -> Optional[pulumi.Input[str]]:
"""
Current billing cycle end date. If nextSku is different from existing SKU, then we switch to newSKu after this contractEndDate Example: `2016-08-25T21:10:29.600Z`
"""
return pulumi.get(self, "billing_contract_end_date")
@billing_contract_end_date.setter
def billing_contract_end_date(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "billing_contract_end_date", value)
@property
@pulumi.getter(name="compartmentId")
def compartment_id(self) -> Optional[pulumi.Input[str]]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment that contains the SDDC.
"""
return pulumi.get(self, "compartment_id")
@compartment_id.setter
def compartment_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "compartment_id", value)
@property
@pulumi.getter(name="computeInstanceId")
def compute_instance_id(self) -> Optional[pulumi.Input[str]]:
"""
In terms of implementation, an ESXi host is a Compute instance that is configured with the chosen bundle of VMware software. The `computeInstanceId` is the [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of that Compute instance.
"""
return pulumi.get(self, "compute_instance_id")
@compute_instance_id.setter
def compute_instance_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "compute_instance_id", value)
@property
@pulumi.getter(name="currentSku")
def current_sku(self) -> Optional[pulumi.Input[str]]:
"""
Billing option selected during SDDC creation. Oracle Cloud Infrastructure VMware Solution supports the following billing interval SKUs: HOUR, MONTH, ONE_YEAR, and THREE_YEARS. [ListSupportedSkus](https://docs.cloud.oracle.com/iaas/api/#/en/vmware/20200501/SupportedSkuSummary/ListSupportedSkus).
"""
return pulumi.get(self, "current_sku")
@current_sku.setter
def current_sku(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "current_sku", value)
@property
@pulumi.getter(name="definedTags")
def defined_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
(Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
"""
return pulumi.get(self, "defined_tags")
@defined_tags.setter
def defined_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "defined_tags", value)
@property
@pulumi.getter(name="displayName")
def display_name(self) -> Optional[pulumi.Input[str]]:
"""
(Updatable) A descriptive name for the ESXi host. It's changeable. Esxi Host name requirements are 1-16 character length limit, Must start with a letter, Must be English letters, numbers, - only, No repeating hyphens, Must be unique within the SDDC.
"""
return pulumi.get(self, "display_name")
@display_name.setter
def display_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "display_name", value)
@property
@pulumi.getter(name="freeformTags")
def freeform_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
(Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
"""
return pulumi.get(self, "freeform_tags")
@freeform_tags.setter
def freeform_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "freeform_tags", value)
@property
@pulumi.getter(name="nextSku")
def next_sku(self) -> Optional[pulumi.Input[str]]:
"""
(Updatable) Billing option to switch to once existing billing cycle ends. If nextSku is null or empty, currentSku will be used to continue with next billing term. [ListSupportedSkus](https://docs.cloud.oracle.com/iaas/api/#/en/vmware/20200501/SupportedSkuSummary/ListSupportedSkus).
"""
return pulumi.get(self, "next_sku")
@next_sku.setter
def next_sku(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "next_sku", value)
@property
@pulumi.getter(name="sddcId")
def sddc_id(self) -> Optional[pulumi.Input[str]]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the SDDC to add the ESXi host to.
"""
return pulumi.get(self, "sddc_id")
@sddc_id.setter
def sddc_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "sddc_id", value)
@property
@pulumi.getter
def state(self) -> Optional[pulumi.Input[str]]:
"""
The current state of the ESXi host.
"""
return pulumi.get(self, "state")
@state.setter
def state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "state", value)
@property
@pulumi.getter(name="timeCreated")
def time_created(self) -> Optional[pulumi.Input[str]]:
"""
The date and time the ESXi host was created, in the format defined by [RFC3339](https://tools.ietf.org/html/rfc3339). Example: `2016-08-25T21:10:29.600Z`
"""
return pulumi.get(self, "time_created")
@time_created.setter
def time_created(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "time_created", value)
@property
@pulumi.getter(name="timeUpdated")
def time_updated(self) -> Optional[pulumi.Input[str]]:
"""
The date and time the ESXi host was updated, in the format defined by [RFC3339](https://tools.ietf.org/html/rfc3339).
"""
return pulumi.get(self, "time_updated")
@time_updated.setter
def time_updated(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "time_updated", value)
class EsxiHost(pulumi.CustomResource):
# NOTE(review): this class follows the standard Pulumi Python SDK codegen
# layout for a custom resource (two typed @overload signatures, a runtime
# dispatcher __init__, and _internal_init building the __props__ bag).
# Hand edits here would normally be overwritten on regeneration — confirm
# whether this file is generated before changing logic.
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
current_sku: Optional[pulumi.Input[str]] = None,
defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
display_name: Optional[pulumi.Input[str]] = None,
freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
next_sku: Optional[pulumi.Input[str]] = None,
sddc_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
This resource provides the Esxi Host resource in Oracle Cloud Infrastructure Oracle Cloud VMware Solution service.
Adds another ESXi host to an existing SDDC. The attributes of the specified
`Sddc` determine the VMware software and other configuration settings used
by the ESXi host.
Use the [WorkRequest](https://docs.cloud.oracle.com/iaas/api/#/en/vmware/20200501/WorkRequest/) operations to track the
creation of the ESXi host.
## Example Usage
```python
import pulumi
import pulumi_oci as oci
test_esxi_host = oci.ocvp.EsxiHost("testEsxiHost",
sddc_id=oci_ocvp_sddc["test_sddc"]["id"],
current_sku=var["esxi_host_current_sku"],
defined_tags={
"Operations.CostCenter": "42",
},
display_name=var["esxi_host_display_name"],
freeform_tags={
"Department": "Finance",
},
next_sku=var["esxi_host_next_sku"])
```
## Import
EsxiHosts can be imported using the `id`, e.g.
```sh
$ pulumi import oci:ocvp/esxiHost:EsxiHost test_esxi_host "id"
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] current_sku: Billing option selected during SDDC creation. Oracle Cloud Infrastructure VMware Solution supports the following billing interval SKUs: HOUR, MONTH, ONE_YEAR, and THREE_YEARS. [ListSupportedSkus](https://docs.cloud.oracle.com/iaas/api/#/en/vmware/20200501/SupportedSkuSummary/ListSupportedSkus).
:param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
:param pulumi.Input[str] display_name: (Updatable) A descriptive name for the ESXi host. It's changeable. Esxi Host name requirements are 1-16 character length limit, Must start with a letter, Must be English letters, numbers, - only, No repeating hyphens, Must be unique within the SDDC.
:param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
:param pulumi.Input[str] next_sku: (Updatable) Billing option to switch to once existing billing cycle ends. If nextSku is null or empty, currentSku will be used to continue with next billing term. [ListSupportedSkus](https://docs.cloud.oracle.com/iaas/api/#/en/vmware/20200501/SupportedSkuSummary/ListSupportedSkus).
:param pulumi.Input[str] sddc_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the SDDC to add the ESXi host to.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: EsxiHostArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
This resource provides the Esxi Host resource in Oracle Cloud Infrastructure Oracle Cloud VMware Solution service.
Adds another ESXi host to an existing SDDC. The attributes of the specified
`Sddc` determine the VMware software and other configuration settings used
by the ESXi host.
Use the [WorkRequest](https://docs.cloud.oracle.com/iaas/api/#/en/vmware/20200501/WorkRequest/) operations to track the
creation of the ESXi host.
## Example Usage
```python
import pulumi
import pulumi_oci as oci
test_esxi_host = oci.ocvp.EsxiHost("testEsxiHost",
sddc_id=oci_ocvp_sddc["test_sddc"]["id"],
current_sku=var["esxi_host_current_sku"],
defined_tags={
"Operations.CostCenter": "42",
},
display_name=var["esxi_host_display_name"],
freeform_tags={
"Department": "Finance",
},
next_sku=var["esxi_host_next_sku"])
```
## Import
EsxiHosts can be imported using the `id`, e.g.
```sh
$ pulumi import oci:ocvp/esxiHost:EsxiHost test_esxi_host "id"
```
:param str resource_name: The name of the resource.
:param EsxiHostArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
# Runtime dispatcher for the two typed @overload signatures above: callers
# pass either an EsxiHostArgs bundle or the individual keyword arguments.
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(EsxiHostArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
current_sku: Optional[pulumi.Input[str]] = None,
defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
display_name: Optional[pulumi.Input[str]] = None,
freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
next_sku: Optional[pulumi.Input[str]] = None,
sddc_id: Optional[pulumi.Input[str]] = None,
__props__=None):
# Normalize/validate the resource options before building the property bag.
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
# __props__ may only be supplied when rehydrating an existing resource
# (i.e. together with opts.id); otherwise the bag is built from arguments.
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = EsxiHostArgs.__new__(EsxiHostArgs)
__props__.__dict__["current_sku"] = current_sku
__props__.__dict__["defined_tags"] = defined_tags
__props__.__dict__["display_name"] = display_name
__props__.__dict__["freeform_tags"] = freeform_tags
__props__.__dict__["next_sku"] = next_sku
# sddc_id is the only required input (unless rehydrating via opts.urn).
if sddc_id is None and not opts.urn:
raise TypeError("Missing required property 'sddc_id'")
__props__.__dict__["sddc_id"] = sddc_id
# Output-only attributes start as None; the engine fills them in.
__props__.__dict__["billing_contract_end_date"] = None
__props__.__dict__["compartment_id"] = None
__props__.__dict__["compute_instance_id"] = None
__props__.__dict__["state"] = None
__props__.__dict__["time_created"] = None
__props__.__dict__["time_updated"] = None
super(EsxiHost, __self__).__init__(
'oci:ocvp/esxiHost:EsxiHost',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
billing_contract_end_date: Optional[pulumi.Input[str]] = None,
compartment_id: Optional[pulumi.Input[str]] = None,
compute_instance_id: Optional[pulumi.Input[str]] = None,
current_sku: Optional[pulumi.Input[str]] = None,
defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
display_name: Optional[pulumi.Input[str]] = None,
freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
next_sku: Optional[pulumi.Input[str]] = None,
sddc_id: Optional[pulumi.Input[str]] = None,
state: Optional[pulumi.Input[str]] = None,
time_created: Optional[pulumi.Input[str]] = None,
time_updated: Optional[pulumi.Input[str]] = None) -> 'EsxiHost':
"""
Get an existing EsxiHost resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] billing_contract_end_date: Current billing cycle end date. If nextSku is different from existing SKU, then we switch to newSKu after this contractEndDate Example: `2016-08-25T21:10:29.600Z`
:param pulumi.Input[str] compartment_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment that contains the SDDC.
:param pulumi.Input[str] compute_instance_id: In terms of implementation, an ESXi host is a Compute instance that is configured with the chosen bundle of VMware software. The `computeInstanceId` is the [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of that Compute instance.
:param pulumi.Input[str] current_sku: Billing option selected during SDDC creation. Oracle Cloud Infrastructure VMware Solution supports the following billing interval SKUs: HOUR, MONTH, ONE_YEAR, and THREE_YEARS. [ListSupportedSkus](https://docs.cloud.oracle.com/iaas/api/#/en/vmware/20200501/SupportedSkuSummary/ListSupportedSkus).
:param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
:param pulumi.Input[str] display_name: (Updatable) A descriptive name for the ESXi host. It's changeable. Esxi Host name requirements are 1-16 character length limit, Must start with a letter, Must be English letters, numbers, - only, No repeating hyphens, Must be unique within the SDDC.
:param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
:param pulumi.Input[str] next_sku: (Updatable) Billing option to switch to once existing billing cycle ends. If nextSku is null or empty, currentSku will be used to continue with next billing term. [ListSupportedSkus](https://docs.cloud.oracle.com/iaas/api/#/en/vmware/20200501/SupportedSkuSummary/ListSupportedSkus).
:param pulumi.Input[str] sddc_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the SDDC to add the ESXi host to.
:param pulumi.Input[str] state: The current state of the ESXi host.
:param pulumi.Input[str] time_created: The date and time the ESXi host was created, in the format defined by [RFC3339](https://tools.ietf.org/html/rfc3339). Example: `2016-08-25T21:10:29.600Z`
:param pulumi.Input[str] time_updated: The date and time the ESXi host was updated, in the format defined by [RFC3339](https://tools.ietf.org/html/rfc3339).
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _EsxiHostState.__new__(_EsxiHostState)
__props__.__dict__["billing_contract_end_date"] = billing_contract_end_date
__props__.__dict__["compartment_id"] = compartment_id
__props__.__dict__["compute_instance_id"] = compute_instance_id
__props__.__dict__["current_sku"] = current_sku
__props__.__dict__["defined_tags"] = defined_tags
__props__.__dict__["display_name"] = display_name
__props__.__dict__["freeform_tags"] = freeform_tags
__props__.__dict__["next_sku"] = next_sku
__props__.__dict__["sddc_id"] = sddc_id
__props__.__dict__["state"] = state
__props__.__dict__["time_created"] = time_created
__props__.__dict__["time_updated"] = time_updated
return EsxiHost(resource_name, opts=opts, __props__=__props__)
# --- Read-only output properties of the live resource ---
@property
@pulumi.getter(name="billingContractEndDate")
def billing_contract_end_date(self) -> pulumi.Output[str]:
"""
Current billing cycle end date. If nextSku is different from existing SKU, then we switch to newSKu after this contractEndDate Example: `2016-08-25T21:10:29.600Z`
"""
return pulumi.get(self, "billing_contract_end_date")
@property
@pulumi.getter(name="compartmentId")
def compartment_id(self) -> pulumi.Output[str]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment that contains the SDDC.
"""
return pulumi.get(self, "compartment_id")
@property
@pulumi.getter(name="computeInstanceId")
def compute_instance_id(self) -> pulumi.Output[str]:
"""
In terms of implementation, an ESXi host is a Compute instance that is configured with the chosen bundle of VMware software. The `computeInstanceId` is the [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of that Compute instance.
"""
return pulumi.get(self, "compute_instance_id")
@property
@pulumi.getter(name="currentSku")
def current_sku(self) -> pulumi.Output[str]:
"""
Billing option selected during SDDC creation. Oracle Cloud Infrastructure VMware Solution supports the following billing interval SKUs: HOUR, MONTH, ONE_YEAR, and THREE_YEARS. [ListSupportedSkus](https://docs.cloud.oracle.com/iaas/api/#/en/vmware/20200501/SupportedSkuSummary/ListSupportedSkus).
"""
return pulumi.get(self, "current_sku")
@property
@pulumi.getter(name="definedTags")
def defined_tags(self) -> pulumi.Output[Mapping[str, Any]]:
"""
(Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
"""
return pulumi.get(self, "defined_tags")
@property
@pulumi.getter(name="displayName")
def display_name(self) -> pulumi.Output[str]:
"""
(Updatable) A descriptive name for the ESXi host. It's changeable. Esxi Host name requirements are 1-16 character length limit, Must start with a letter, Must be English letters, numbers, - only, No repeating hyphens, Must be unique within the SDDC.
"""
return pulumi.get(self, "display_name")
@property
@pulumi.getter(name="freeformTags")
def freeform_tags(self) -> pulumi.Output[Mapping[str, Any]]:
"""
(Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
"""
return pulumi.get(self, "freeform_tags")
@property
@pulumi.getter(name="nextSku")
def next_sku(self) -> pulumi.Output[str]:
"""
(Updatable) Billing option to switch to once existing billing cycle ends. If nextSku is null or empty, currentSku will be used to continue with next billing term. [ListSupportedSkus](https://docs.cloud.oracle.com/iaas/api/#/en/vmware/20200501/SupportedSkuSummary/ListSupportedSkus).
"""
return pulumi.get(self, "next_sku")
@property
@pulumi.getter(name="sddcId")
def sddc_id(self) -> pulumi.Output[str]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the SDDC to add the ESXi host to.
"""
return pulumi.get(self, "sddc_id")
@property
@pulumi.getter
def state(self) -> pulumi.Output[str]:
"""
The current state of the ESXi host.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="timeCreated")
def time_created(self) -> pulumi.Output[str]:
"""
The date and time the ESXi host was created, in the format defined by [RFC3339](https://tools.ietf.org/html/rfc3339). Example: `2016-08-25T21:10:29.600Z`
"""
return pulumi.get(self, "time_created")
@property
@pulumi.getter(name="timeUpdated")
def time_updated(self) -> pulumi.Output[str]:
"""
The date and time the ESXi host was updated, in the format defined by [RFC3339](https://tools.ietf.org/html/rfc3339).
"""
return pulumi.get(self, "time_updated")
| 58.332258 | 347 | 0.683238 | 4,636 | 36,166 | 5.159189 | 0.062554 | 0.054269 | 0.05268 | 0.052429 | 0.917468 | 0.899323 | 0.888243 | 0.877122 | 0.866711 | 0.835354 | 0 | 0.012817 | 0.206133 | 36,166 | 619 | 348 | 58.426494 | 0.820243 | 0.50965 | 0 | 0.670659 | 1 | 0 | 0.102421 | 0.013623 | 0 | 0 | 0 | 0 | 0 | 1 | 0.164671 | false | 0.002994 | 0.01497 | 0 | 0.281437 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
047e002f2df053a95136b66bd497db5f068fc28c | 452 | py | Python | 12-Joining Data With Pandas/Chapter_2/02-Enriching a dataset.py | Pegasus-01/Data-manipulation-and-merging-with-pandas | 5346678d25820d9fe352bd70294484ecd96fccf7 | [
"Apache-2.0"
] | 1 | 2020-10-18T16:42:28.000Z | 2020-10-18T16:42:28.000Z | 12-Joining Data With Pandas/Chapter_2/02-Enriching a dataset.py | Pegasus-01/Data-manipulation-and-merging-with-pandas | 5346678d25820d9fe352bd70294484ecd96fccf7 | [
"Apache-2.0"
] | null | null | null | 12-Joining Data With Pandas/Chapter_2/02-Enriching a dataset.py | Pegasus-01/Data-manipulation-and-merging-with-pandas | 5346678d25820d9fe352bd70294484ecd96fccf7 | [
"Apache-2.0"
] | null | null | null | #part1
# Part 1: a left join keeps every toy_story row, with NaN taglines where
# no matching `id` exists in the taglines table.
left_joined = toy_story.merge(taglines, on='id', how='left')
print(left_joined)
print(left_joined.shape)
# Part 2: an inner join keeps only the ids present in both tables, so the
# result has fewer rows than the left join above.
inner_joined = toy_story.merge(taglines, on='id', how='inner')
print(inner_joined)
print(inner_joined.shape)
| 26.588235 | 61 | 0.756637 | 74 | 452 | 4.459459 | 0.310811 | 0.266667 | 0.193939 | 0.290909 | 0.915152 | 0.915152 | 0.915152 | 0.915152 | 0.915152 | 0.684848 | 0 | 0.005236 | 0.154867 | 452 | 16 | 62 | 28.25 | 0.858639 | 0.457965 | 0 | 0.666667 | 0 | 0 | 0.058296 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.666667 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 10 |
04eb64d71d585625dca5589cbe49794f6d3774c9 | 34,269 | py | Python | tests/test_gitparser.py | SteffenTunkel/vcsSHARK | 2c8fd03347b10a99f5339c1d99cd051ef59793ac | [
"Apache-2.0"
] | 1 | 2020-10-08T12:02:40.000Z | 2020-10-08T12:02:40.000Z | tests/test_gitparser.py | SteffenTunkel/vcsSHARK | 2c8fd03347b10a99f5339c1d99cd051ef59793ac | [
"Apache-2.0"
] | 6 | 2018-09-10T10:28:42.000Z | 2022-01-22T08:52:33.000Z | tests/test_gitparser.py | SteffenTunkel/vcsSHARK | 2c8fd03347b10a99f5339c1d99cd051ef59793ac | [
"Apache-2.0"
] | 4 | 2018-03-15T22:06:32.000Z | 2022-03-01T00:39:03.000Z | import unittest
import logging
import os
import datetime
from pyvcsshark.parser.gitparser import GitParser
from tests.datastoremock import DatastoreMock
class GitParserTest(unittest.TestCase):
    """Tests for :class:`GitParser` repository detection and basic metadata.

    ``setUp`` points the parser at the small git repository bundled under
    ``tests/data/testdatarepository``.
    """

    parser = None

    def setUp(self):
        # Keep the test output quiet; only errors are reported.
        logging.basicConfig(level=logging.ERROR)
        self.parser = GitParser()
        self.parser.detect(os.path.dirname(os.path.realpath(__file__))+"/data/testdatarepository")

    def test_detect(self):
        """detect() must be falsy for a path that is not a git repository."""
        self.assertFalse(self.parser.detect("./nonsense/path"))

    def test_repositoryType(self):
        """The parser must identify itself as a git parser."""
        self.assertEqual(self.parser.repository_type, "git")
class GitParserCommitsTest(GitParserTest):
list_of_commits = []
@classmethod
def setUpClass(cls):
    """Parse the bundled test repository once and collect all of its commits."""
    # Only errors should show up in the test output.
    logging.basicConfig(level=logging.ERROR)
    repo_path = os.path.dirname(os.path.realpath(__file__)) + "/data/testdatarepository"
    cls.parser = GitParser()
    cls.parser.detect(repo_path)
    cls.parser.initialize()
    store = DatastoreMock()
    cls.parser.parse(repo_path, store, 2)
    # Drain the mock datastore's commit queue into a plain list.
    commit_queue = store.get_commit_queue()
    while not commit_queue.empty():
        cls.list_of_commits.append(commit_queue.get())
    # Order commits chronologically so the tests can index them deterministically.
    cls.list_of_commits.sort(key=lambda commit: commit.committerDate)
def test_parsing_commit1(self):
    """Verify every parsed attribute of the first (root) commit of the test repository."""
    commit1 = self.list_of_commits[0]

    # Commit metadata: the root commit has no parents and no tags.
    self.assertEqual("3c0a6fc133b8b50b8c217642fef7eb948f29b690", commit1.id)
    self.assertListEqual([], commit1.parents)
    self.assertEqual("first commit\n", commit1.message)
    self.assertListEqual([], commit1.tags)

    # Branches: the commit must be reachable from master, locally and remotely.
    list_of_branch_names = [branch.name for branch in commit1.branches]
    self.assertEqual(3, len(commit1.branches))
    self.assertIn("refs/heads/master", list_of_branch_names)
    self.assertIn("refs/remotes/origin/HEAD", list_of_branch_names)
    self.assertIn("refs/remotes/origin/master", list_of_branch_names)

    # Author/committer timestamps (stored as UTC) and their +60 minute offsets.
    self.assertEqual(datetime.datetime.utcfromtimestamp(1453374841), commit1.authorDate)
    self.assertEqual(datetime.datetime.utcfromtimestamp(1453374841), commit1.committerDate)
    self.assertEqual(60, commit1.authorOffset)
    self.assertEqual(60, commit1.committerOffset)

    # Author identity.
    self.assertEqual("Fabian Trautsch", commit1.author.name)
    self.assertEqual("ftrautsch@googlemail.com", commit1.author.email)

    # Committer identity.
    self.assertEqual("Fabian Trautsch", commit1.committer.name)
    self.assertEqual("ftrautsch@googlemail.com", commit1.committer.email)

    # Changed files: one binary (lib.jar) and two text files were added.
    self.assertEqual(3, len(commit1.changedFiles))

    # lib.jar: added binary file -> no line counts and no hunks.
    test_file = [file for file in commit1.changedFiles if file.path == "lib.jar"][0]
    self.assertEqual("lib.jar", test_file.path)
    self.assertEqual(0, test_file.linesAdded)
    self.assertEqual(0, test_file.linesDeleted)
    self.assertTrue(test_file.isBinary)
    self.assertEqual("A", test_file.mode)
    self.assertEqual(None, test_file.oldPath)
    self.assertEqual(30747, test_file.size)
    self.assertEqual(None, test_file.parent_revision_hash)
    self.assertEqual(len(test_file.hunks), 0)

    # test.txt: added text file with a single one-line hunk.
    test_file = [file for file in commit1.changedFiles if file.path == "test.txt"][0]
    self.assertEqual("test.txt", test_file.path)
    self.assertEqual(1, test_file.linesAdded)
    self.assertEqual(0, test_file.linesDeleted)
    self.assertFalse(test_file.isBinary)
    self.assertEqual("A", test_file.mode)
    self.assertEqual(None, test_file.oldPath)
    self.assertEqual(6, test_file.size)
    self.assertEqual(None, test_file.parent_revision_hash)
    # Original code asserted the hunk count twice in a row; once is enough.
    self.assertEqual(len(test_file.hunks), 1)
    self.assertEqual(1, test_file.hunks[0].new_lines)
    self.assertEqual(1, test_file.hunks[0].new_start)
    self.assertEqual(0, test_file.hunks[0].old_start)
    self.assertEqual(0, test_file.hunks[0].old_lines)
    self.assertEqual("+test1\n", test_file.hunks[0].content)

    # test2.txt: added text file with a single one-line hunk.
    test_file = [file for file in commit1.changedFiles if file.path == "test2.txt"][0]
    self.assertEqual("test2.txt", test_file.path)
    self.assertEqual(1, test_file.linesAdded)
    self.assertEqual(0, test_file.linesDeleted)
    self.assertFalse(test_file.isBinary)
    self.assertEqual("A", test_file.mode)
    self.assertEqual(None, test_file.oldPath)
    self.assertEqual(6, test_file.size)
    self.assertEqual(None, test_file.parent_revision_hash)
    self.assertEqual(len(test_file.hunks), 1)
    self.assertEqual(1, test_file.hunks[0].new_lines)
    self.assertEqual(1, test_file.hunks[0].new_start)
    self.assertEqual(0, test_file.hunks[0].old_start)
    self.assertEqual(0, test_file.hunks[0].old_lines)
    self.assertEqual("+test2\n", test_file.hunks[0].content)
def test_parsing_commit2(self):
    """Verify every parsed attribute of the second commit of the test repository."""
    commit = self.list_of_commits[1]
    parent_hash = '3c0a6fc133b8b50b8c217642fef7eb948f29b690'

    # Basic commit metadata: exactly one parent (the root commit), no tags.
    self.assertEqual("022a1584a31ccc0816d20bfbbeb5c45aa290c7dd", commit.id)
    self.assertListEqual([parent_hash], commit.parents)
    self.assertEqual("second commit\n", commit.message)
    self.assertListEqual([], commit.tags)

    # The commit must be on the local and remote master branches.
    branch_names = [branch.name for branch in commit.branches]
    self.assertEqual(3, len(commit.branches))
    self.assertIn("refs/heads/master", branch_names)
    self.assertIn("refs/remotes/origin/HEAD", branch_names)
    self.assertIn("refs/remotes/origin/master", branch_names)

    # Author/committer timestamps (UTC) and their +60 minute offsets.
    self.assertEqual(datetime.datetime.utcfromtimestamp(1453375367), commit.authorDate)
    self.assertEqual(datetime.datetime.utcfromtimestamp(1453375367), commit.committerDate)
    self.assertEqual(60, commit.authorOffset)
    self.assertEqual(60, commit.committerOffset)

    # Author and committer identity.
    self.assertEqual("Fabian Trautsch", commit.author.name)
    self.assertEqual("ftrautsch@googlemail.com", commit.author.email)
    self.assertEqual("Fabian Trautsch", commit.committer.name)
    self.assertEqual("ftrautsch@googlemail.com", commit.committer.email)

    # Three files changed: one added, one deleted, one modified.
    self.assertEqual(3, len(commit.changedFiles))

    # test3.txt was added in this commit.
    changed_file = [f for f in commit.changedFiles if f.path == "test3.txt"][0]
    self.assertEqual("test3.txt", changed_file.path)
    self.assertEqual(1, changed_file.linesAdded)
    self.assertEqual(0, changed_file.linesDeleted)
    self.assertFalse(changed_file.isBinary)
    self.assertEqual("A", changed_file.mode)
    self.assertEqual(None, changed_file.oldPath)
    self.assertEqual(6, changed_file.size)
    self.assertEqual(parent_hash, changed_file.parent_revision_hash)
    self.assertEqual(len(changed_file.hunks), 1)

    # test.txt was deleted; its single hunk removes the old line.
    changed_file = [f for f in commit.changedFiles if f.path == "test.txt"][0]
    self.assertEqual("test.txt", changed_file.path)
    self.assertEqual(0, changed_file.linesAdded)
    self.assertEqual(1, changed_file.linesDeleted)
    self.assertFalse(changed_file.isBinary)
    self.assertEqual("D", changed_file.mode)
    self.assertEqual(None, changed_file.oldPath)
    self.assertEqual(0, changed_file.size)
    self.assertEqual(parent_hash, changed_file.parent_revision_hash)
    self.assertEqual(len(changed_file.hunks), 1)
    self.assertEqual(0, changed_file.hunks[0].new_lines)
    self.assertEqual(0, changed_file.hunks[0].new_start)
    self.assertEqual(1, changed_file.hunks[0].old_start)
    self.assertEqual(1, changed_file.hunks[0].old_lines)
    self.assertEqual("-test1\n", changed_file.hunks[0].content)

    # test2.txt was modified in place (one line replaced).
    changed_file = [f for f in commit.changedFiles if f.path == "test2.txt"][0]
    self.assertEqual("test2.txt", changed_file.path)
    self.assertEqual(1, changed_file.linesAdded)
    self.assertEqual(1, changed_file.linesDeleted)
    self.assertFalse(changed_file.isBinary)
    self.assertEqual("M", changed_file.mode)
    self.assertEqual(None, changed_file.oldPath)
    self.assertEqual(5, changed_file.size)
    self.assertEqual(parent_hash, changed_file.parent_revision_hash)
    self.assertEqual(len(changed_file.hunks), 1)
    self.assertEqual(1, changed_file.hunks[0].new_lines)
    self.assertEqual(1, changed_file.hunks[0].new_start)
    self.assertEqual(1, changed_file.hunks[0].old_start)
    self.assertEqual(1, changed_file.hunks[0].old_lines)
    self.assertEqual("-test2\n+test\n", changed_file.hunks[0].content)
def test_parsing_commit3(self):
    """Verify every parsed attribute of the third commit (special characters in the message)."""
    commit = self.list_of_commits[2]
    parent_hash = '022a1584a31ccc0816d20bfbbeb5c45aa290c7dd'

    # Basic commit metadata; the message contains non-ASCII characters.
    # NOTE(review): unlike the other commit tests, tags are not asserted here —
    # possibly deliberate (this commit may carry a tag); confirm against the
    # bundled test repository before adding a tags assertion.
    self.assertEqual("5ed91aa4557b5042fa7096bf6c69463024c46b6f", commit.id)
    self.assertListEqual([parent_hash], commit.parents)
    self.assertEqual("test$#.*;ßöä%!&\n", commit.message)

    # The commit must be on the local and remote master branches.
    branch_names = [branch.name for branch in commit.branches]
    self.assertEqual(3, len(commit.branches))
    self.assertIn("refs/heads/master", branch_names)
    self.assertIn("refs/remotes/origin/HEAD", branch_names)
    self.assertIn("refs/remotes/origin/master", branch_names)

    # Author/committer timestamps (UTC) and their +60 minute offsets.
    self.assertEqual(datetime.datetime.utcfromtimestamp(1453375768), commit.authorDate)
    self.assertEqual(datetime.datetime.utcfromtimestamp(1453375768), commit.committerDate)
    self.assertEqual(60, commit.authorOffset)
    self.assertEqual(60, commit.committerOffset)

    # Author and committer identity.
    self.assertEqual("Fabian Trautsch", commit.author.name)
    self.assertEqual("ftrautsch@googlemail.com", commit.author.email)
    self.assertEqual("Fabian Trautsch", commit.committer.name)
    self.assertEqual("ftrautsch@googlemail.com", commit.committer.email)

    # Exactly one file changed: program.py was added with a one-line hunk.
    self.assertEqual(1, len(commit.changedFiles))
    changed_file = [f for f in commit.changedFiles if f.path == "program.py"][0]
    self.assertEqual("program.py", changed_file.path)
    self.assertEqual(1, changed_file.linesAdded)
    self.assertEqual(0, changed_file.linesDeleted)
    self.assertFalse(changed_file.isBinary)
    self.assertEqual("A", changed_file.mode)
    self.assertEqual(None, changed_file.oldPath)
    self.assertEqual(15, changed_file.size)
    self.assertEqual(parent_hash, changed_file.parent_revision_hash)
    self.assertEqual(len(changed_file.hunks), 1)
    self.assertEqual(1, changed_file.hunks[0].new_lines)
    self.assertEqual(1, changed_file.hunks[0].new_start)
    self.assertEqual(0, changed_file.hunks[0].old_start)
    self.assertEqual(0, changed_file.hunks[0].old_lines)
    self.assertEqual("+import nothing\n", changed_file.hunks[0].content)
def test_parsing_commit4(self):
    """Check the fourth parsed commit: two renames (lib.jar and program.py)."""
    commit = self.list_of_commits[3]
    # Identity, ancestry, message, tags.
    self.assertEqual(commit.id, "6fe2eff1f0bbc3220128e082385a01558e3306a6")
    self.assertListEqual(commit.parents, ['5ed91aa4557b5042fa7096bf6c69463024c46b6f'])
    self.assertEqual(commit.message, "moved\n")
    self.assertListEqual(commit.tags, [])
    # The commit must sit on exactly the three master refs.
    branch_names = [branch.name for branch in commit.branches]
    self.assertEqual(len(commit.branches), 3)
    for ref in ("refs/heads/master",
                "refs/remotes/origin/HEAD",
                "refs/remotes/origin/master"):
        self.assertIn(ref, branch_names)
    # Author/committer dates and timezone offsets (+60 minutes).
    when = datetime.datetime.utcfromtimestamp(1453379814)
    self.assertEqual(commit.authorDate, when)
    self.assertEqual(commit.committerDate, when)
    self.assertEqual(commit.authorOffset, 60)
    self.assertEqual(commit.committerOffset, 60)
    # Author and committer are the same person.
    for person in (commit.author, commit.committer):
        self.assertEqual(person.name, "Fabian Trautsch")
        self.assertEqual(person.email, "ftrautsch@googlemail.com")
    # Two changed files, both recorded as renames ("R") without hunks.
    self.assertEqual(len(commit.changedFiles), 2)
    jar_file = [f for f in commit.changedFiles if f.path == "libs/lib.jar"][0]
    self.assertEqual(jar_file.path, "libs/lib.jar")
    self.assertEqual(jar_file.linesAdded, 0)
    self.assertEqual(jar_file.linesDeleted, 0)
    self.assertTrue(jar_file.isBinary)
    self.assertEqual(jar_file.mode, "R")
    self.assertEqual(jar_file.oldPath, 'lib.jar')
    self.assertEqual(jar_file.size, 30747)
    self.assertEqual(jar_file.parent_revision_hash, '5ed91aa4557b5042fa7096bf6c69463024c46b6f')
    self.assertEqual(len(jar_file.hunks), 0)
    py_file = [f for f in commit.changedFiles if f.path == "program1.py"][0]
    self.assertEqual(py_file.path, "program1.py")
    self.assertEqual(py_file.linesAdded, 0)
    self.assertEqual(py_file.linesDeleted, 0)
    self.assertFalse(py_file.isBinary)
    self.assertEqual(py_file.mode, "R")
    self.assertEqual(py_file.oldPath, 'program.py')
    self.assertEqual(py_file.size, 15)
    self.assertEqual(py_file.parent_revision_hash, '5ed91aa4557b5042fa7096bf6c69463024c46b6f')
    self.assertEqual(len(py_file.hunks), 0)
def test_parsing_commit5(self):
    """Check the fifth parsed commit: branch3.txt added with one hunk."""
    commit = self.list_of_commits[4]
    # Identity, ancestry and message.
    self.assertEqual(commit.id, "a8dfa0944a8c3d97f217d34705de2ae1c7e68793")
    self.assertListEqual(commit.parents, ['6fe2eff1f0bbc3220128e082385a01558e3306a6'])
    self.assertEqual(commit.message, "branch3\n")
    # Branch membership: exactly the three master refs.
    branch_names = [branch.name for branch in commit.branches]
    self.assertEqual(len(commit.branches), 3)
    for ref in ("refs/heads/master",
                "refs/remotes/origin/HEAD",
                "refs/remotes/origin/master"):
        self.assertIn(ref, branch_names)
    # Author/committer dates and offsets.
    when = datetime.datetime.utcfromtimestamp(1453380347)
    self.assertEqual(commit.authorDate, when)
    self.assertEqual(commit.committerDate, when)
    self.assertEqual(commit.authorOffset, 60)
    self.assertEqual(commit.committerOffset, 60)
    for person in (commit.author, commit.committer):
        self.assertEqual(person.name, "Fabian Trautsch")
        self.assertEqual(person.email, "ftrautsch@googlemail.com")
    # One added file with a single insertion hunk.
    self.assertEqual(len(commit.changedFiles), 1)
    changed = [f for f in commit.changedFiles if f.path == "branch3.txt"][0]
    self.assertEqual(changed.path, "branch3.txt")
    self.assertEqual(changed.linesAdded, 1)
    self.assertEqual(changed.linesDeleted, 0)
    self.assertFalse(changed.isBinary)
    self.assertEqual(changed.mode, "A")
    self.assertEqual(changed.oldPath, None)
    self.assertEqual(changed.size, 8)
    self.assertEqual(changed.parent_revision_hash, '6fe2eff1f0bbc3220128e082385a01558e3306a6')
    self.assertEqual(len(changed.hunks), 1)
    hunk = changed.hunks[0]
    self.assertEqual((hunk.new_lines, hunk.new_start, hunk.old_start, hunk.old_lines),
                     (1, 1, 0, 0))
    self.assertEqual(hunk.content, "+branch3\n")
def test_parsing_commit6(self):
    """Check the sixth parsed commit: testbranch3.txt added with one hunk."""
    commit = self.list_of_commits[5]
    # Identity, ancestry, message, tags.
    self.assertEqual(commit.id, "e91b0419196248c664f2b2e06c9a2c97452fda5c")
    self.assertListEqual(commit.parents, ['a8dfa0944a8c3d97f217d34705de2ae1c7e68793'])
    self.assertEqual(commit.message, "testbranch3\n")
    self.assertListEqual(commit.tags, [])
    # Branch membership: exactly the three master refs.
    branch_names = [branch.name for branch in commit.branches]
    self.assertEqual(len(commit.branches), 3)
    for ref in ("refs/heads/master",
                "refs/remotes/origin/HEAD",
                "refs/remotes/origin/master"):
        self.assertIn(ref, branch_names)
    # Author/committer dates and offsets.
    when = datetime.datetime.utcfromtimestamp(1453380366)
    self.assertEqual(commit.authorDate, when)
    self.assertEqual(commit.committerDate, when)
    self.assertEqual(commit.authorOffset, 60)
    self.assertEqual(commit.committerOffset, 60)
    for person in (commit.author, commit.committer):
        self.assertEqual(person.name, "Fabian Trautsch")
        self.assertEqual(person.email, "ftrautsch@googlemail.com")
    # One added file with a single insertion hunk.
    self.assertEqual(len(commit.changedFiles), 1)
    changed = [f for f in commit.changedFiles if f.path == "testbranch3.txt"][0]
    self.assertEqual(changed.path, "testbranch3.txt")
    self.assertEqual(changed.linesAdded, 1)
    self.assertEqual(changed.linesDeleted, 0)
    self.assertFalse(changed.isBinary)
    self.assertEqual(changed.mode, "A")
    self.assertEqual(changed.oldPath, None)
    self.assertEqual(changed.size, 12)
    self.assertEqual(changed.parent_revision_hash, 'a8dfa0944a8c3d97f217d34705de2ae1c7e68793')
    self.assertEqual(len(changed.hunks), 1)
    hunk = changed.hunks[0]
    self.assertEqual((hunk.new_lines, hunk.new_start, hunk.old_start, hunk.old_lines),
                     (1, 1, 0, 0))
    self.assertEqual(hunk.content, "+testbranch3\n")
def test_parsing_commit7(self):
    """Check the seventh parsed commit: lines.txt added with 40 lines."""
    commit = self.list_of_commits[6]
    # Identity, ancestry, message, tags.
    self.assertEqual(commit.id, "204d306b10e123f2474612a297b83be6ac79e519")
    self.assertListEqual(commit.parents, ['e91b0419196248c664f2b2e06c9a2c97452fda5c'])
    self.assertEqual(commit.message, "lines\n")
    self.assertListEqual(commit.tags, [])
    # Branch membership: exactly the three master refs.
    branch_names = [branch.name for branch in commit.branches]
    self.assertEqual(len(commit.branches), 3)
    for ref in ("refs/heads/master",
                "refs/remotes/origin/HEAD",
                "refs/remotes/origin/master"):
        self.assertIn(ref, branch_names)
    # Author/committer dates and offsets.
    when = datetime.datetime.utcfromtimestamp(1453380546)
    self.assertEqual(commit.authorDate, when)
    self.assertEqual(commit.committerDate, when)
    self.assertEqual(commit.authorOffset, 60)
    self.assertEqual(commit.committerOffset, 60)
    for person in (commit.author, commit.committer):
        self.assertEqual(person.name, "Fabian Trautsch")
        self.assertEqual(person.email, "ftrautsch@googlemail.com")
    # One added file with a single 40-line insertion hunk.
    self.assertEqual(len(commit.changedFiles), 1)
    changed = [f for f in commit.changedFiles if f.path == "lines.txt"][0]
    self.assertEqual(changed.path, "lines.txt")
    self.assertEqual(changed.linesAdded, 40)
    self.assertEqual(changed.linesDeleted, 0)
    self.assertFalse(changed.isBinary)
    self.assertEqual(changed.mode, "A")
    self.assertEqual(changed.oldPath, None)
    self.assertEqual(changed.size, 271)
    self.assertEqual(changed.parent_revision_hash, 'e91b0419196248c664f2b2e06c9a2c97452fda5c')
    self.assertEqual(len(changed.hunks), 1)
    hunk = changed.hunks[0]
    self.assertEqual((hunk.new_lines, hunk.new_start, hunk.old_start, hunk.old_lines),
                     (40, 1, 0, 0))
    # "+line1\n" through "+line40\n", concatenated.
    expected_content = "".join("+line%d\n" % n for n in range(1, 41))
    self.assertEqual(hunk.content, expected_content)
def test_parsing_commit8(self):
    """Check the eighth parsed commit: lines.txt modified in three hunks."""
    commit = self.list_of_commits[7]
    # Identity, ancestry, message, tags.
    self.assertEqual(commit.id, "830c29f111f261e26897d42e94c15960a512c0e4")
    self.assertListEqual(commit.parents, ['204d306b10e123f2474612a297b83be6ac79e519'])
    self.assertEqual(commit.message, "changed lines\n")
    self.assertListEqual(commit.tags, [])
    # Branch membership: exactly the three master refs.
    branch_names = [branch.name for branch in commit.branches]
    self.assertEqual(len(commit.branches), 3)
    for ref in ("refs/heads/master",
                "refs/remotes/origin/HEAD",
                "refs/remotes/origin/master"):
        self.assertIn(ref, branch_names)
    # Author/committer dates and offsets.
    when = datetime.datetime.utcfromtimestamp(1453381291)
    self.assertEqual(commit.authorDate, when)
    self.assertEqual(commit.committerDate, when)
    self.assertEqual(commit.authorOffset, 60)
    self.assertEqual(commit.committerOffset, 60)
    for person in (commit.author, commit.committer):
        self.assertEqual(person.name, "Fabian Trautsch")
        self.assertEqual(person.email, "ftrautsch@googlemail.com")
    # One modified file.
    self.assertEqual(len(commit.changedFiles), 1)
    changed = [f for f in commit.changedFiles if f.path == "lines.txt"][0]
    self.assertEqual(changed.path, "lines.txt")
    self.assertEqual(changed.linesAdded, 2)
    self.assertEqual(changed.linesDeleted, 2)
    self.assertFalse(changed.isBinary)
    self.assertEqual(changed.mode, "M")
    self.assertEqual(changed.oldPath, None)
    self.assertEqual(changed.size, 266)
    self.assertEqual(changed.parent_revision_hash, '204d306b10e123f2474612a297b83be6ac79e519')
    # Three hunks: deletion at top, replacement mid-file, addition at end.
    expected_hunks = [
        (0, 0, 1, 1, "-line1\n"),
        (1, 19, 20, 1, "-line20\n+\n"),
        (1, 40, 40, 0, "+line41\n"),
    ]
    self.assertEqual(len(changed.hunks), 3)
    for hunk, (new_lines, new_start, old_start, old_lines, content) in zip(changed.hunks, expected_hunks):
        self.assertEqual(hunk.new_lines, new_lines)
        self.assertEqual(hunk.new_start, new_start)
        self.assertEqual(hunk.old_start, old_start)
        self.assertEqual(hunk.old_lines, old_lines)
        self.assertEqual(hunk.content, content)
def test_merge_commit(self):
    """Check a two-parent merge commit with per-parent file diffs."""
    commit = self.list_of_commits[11]
    # Identity, ancestry, message, tags.
    self.assertEqual(commit.id, "c298c565ac291bb3fd8e74da9798cc5d9a49e4e5")
    self.assertListEqual(commit.parents,
                         ['88750d1abeff99a162801d2a38a1ffa3b0f0d759',
                          '6730077bc66400329f01a06c27e7dc9163f14f3f'])
    self.assertEqual(commit.message, "blub\n")
    self.assertListEqual(commit.tags, [])
    # Branch membership: exactly the three master refs.
    branch_names = [branch.name for branch in commit.branches]
    self.assertEqual(len(commit.branches), 3)
    for ref in ("refs/heads/master",
                "refs/remotes/origin/HEAD",
                "refs/remotes/origin/master"):
        self.assertIn(ref, branch_names)
    # Author/committer dates and offsets.
    when = datetime.datetime.utcfromtimestamp(1549887589)
    self.assertEqual(commit.authorDate, when)
    self.assertEqual(commit.committerDate, when)
    self.assertEqual(commit.authorOffset, 60)
    self.assertEqual(commit.committerOffset, 60)
    for person in (commit.author, commit.committer):
        self.assertEqual(person.name, "Fabian Trautsch")
        self.assertEqual(person.email, "fabian.trautsch@informatik.uni-goettingen.de")
    # Two changed files, each diffed against a different parent.
    self.assertEqual(len(commit.changedFiles), 2)
    lines_file = [f for f in commit.changedFiles if f.path == "lines.txt"][0]
    self.assertEqual(lines_file.path, "lines.txt")
    self.assertEqual(lines_file.linesAdded, 2)
    self.assertEqual(lines_file.linesDeleted, 2)
    self.assertFalse(lines_file.isBinary)
    self.assertEqual(lines_file.mode, "M")
    self.assertEqual(lines_file.oldPath, None)
    self.assertEqual(lines_file.size, 266)
    self.assertEqual(lines_file.parent_revision_hash, '6730077bc66400329f01a06c27e7dc9163f14f3f')
    self.assertEqual(len(lines_file.hunks), 1)
    hunk = lines_file.hunks[0]
    self.assertEqual((hunk.new_lines, hunk.new_start, hunk.old_start, hunk.old_lines),
                     (2, 1, 1, 2))
    self.assertEqual(hunk.content, "-line1\n-line3\n+line2\n+line4\n")
    test2_file = [f for f in commit.changedFiles if f.path == "test2.txt"][0]
    self.assertEqual(test2_file.path, "test2.txt")
    self.assertEqual(test2_file.linesAdded, 1)
    self.assertEqual(test2_file.linesDeleted, 1)
    self.assertFalse(test2_file.isBinary)
    self.assertEqual(test2_file.mode, "M")
    self.assertEqual(test2_file.oldPath, None)
    self.assertEqual(test2_file.size, 6)
    self.assertEqual(test2_file.parent_revision_hash, '88750d1abeff99a162801d2a38a1ffa3b0f0d759')
    self.assertEqual(len(test2_file.hunks), 1)
    hunk = test2_file.hunks[0]
    self.assertEqual((hunk.new_lines, hunk.new_start, hunk.old_start, hunk.old_lines),
                     (1, 1, 1, 1))
    self.assertEqual(hunk.content, "-test\n+test2\n")
def test_merge_commit_changed_on_master_and_feature_branch(self):
    """Check a merge where lines.txt changed on both sides of the merge."""
    commit = self.list_of_commits[15]
    # Identity, ancestry, message, tags.
    self.assertEqual(commit.id, "b104c54f5f7e4614afa4b9cf3e7e21f0050abc1c")
    self.assertListEqual(commit.parents,
                         ['09788da8a216799a6d611a9b06bd54162b44c5d2',
                          'd4aba22cd313977e5e6e6b4f915df0bce6ac7468'])
    self.assertEqual(commit.message, "Merge branch 'lines'\n")
    self.assertListEqual(commit.tags, [])
    # Branch membership: exactly the three master refs.
    branch_names = [branch.name for branch in commit.branches]
    self.assertEqual(len(commit.branches), 3)
    for ref in ("refs/heads/master",
                "refs/remotes/origin/HEAD",
                "refs/remotes/origin/master"):
        self.assertIn(ref, branch_names)
    # Author/committer dates and offsets.
    when = datetime.datetime.utcfromtimestamp(1549889088)
    self.assertEqual(commit.authorDate, when)
    self.assertEqual(commit.committerDate, when)
    self.assertEqual(commit.authorOffset, 60)
    self.assertEqual(commit.committerOffset, 60)
    for person in (commit.author, commit.committer):
        self.assertEqual(person.name, "Fabian Trautsch")
        self.assertEqual(person.email, "fabian.trautsch@informatik.uni-goettingen.de")
    # Three changed-file entries: lines.txt once per parent, plus test2.txt.
    self.assertEqual(len(commit.changedFiles), 3)
    lines_changes = [f for f in commit.changedFiles if f.path == "lines.txt"]
    master_side = lines_changes[0]
    self.assertEqual(master_side.path, "lines.txt")
    self.assertEqual(master_side.linesAdded, 1)
    self.assertEqual(master_side.linesDeleted, 1)
    self.assertFalse(master_side.isBinary)
    self.assertEqual(master_side.mode, "M")
    self.assertEqual(master_side.oldPath, None)
    self.assertEqual(master_side.size, 266)
    self.assertEqual(master_side.parent_revision_hash, '09788da8a216799a6d611a9b06bd54162b44c5d2')
    self.assertEqual(len(master_side.hunks), 1)
    hunk = master_side.hunks[0]
    self.assertEqual((hunk.new_lines, hunk.new_start, hunk.old_start, hunk.old_lines),
                     (1, 40, 40, 1))
    self.assertEqual(hunk.content, "-line41\n+line42\n")
    feature_side = lines_changes[1]
    self.assertEqual(feature_side.path, "lines.txt")
    self.assertEqual(feature_side.linesAdded, 1)
    self.assertEqual(feature_side.linesDeleted, 1)
    self.assertFalse(feature_side.isBinary)
    self.assertEqual(feature_side.mode, "M")
    self.assertEqual(feature_side.oldPath, None)
    self.assertEqual(feature_side.size, 266)
    self.assertEqual(feature_side.parent_revision_hash, 'd4aba22cd313977e5e6e6b4f915df0bce6ac7468')
    self.assertEqual(len(feature_side.hunks), 1)
    hunk = feature_side.hunks[0]
    self.assertEqual((hunk.new_lines, hunk.new_start, hunk.old_start, hunk.old_lines),
                     (1, 1, 1, 1))
    self.assertEqual(hunk.content, "-line2\n+line1\n")
    test2_file = [f for f in commit.changedFiles if f.path == "test2.txt"][0]
    self.assertEqual(test2_file.path, "test2.txt")
    self.assertEqual(test2_file.linesAdded, 1)
    self.assertEqual(test2_file.linesDeleted, 1)
    self.assertFalse(test2_file.isBinary)
    self.assertEqual(test2_file.mode, "M")
    self.assertEqual(test2_file.oldPath, None)
    self.assertEqual(test2_file.size, 6)
    self.assertEqual(test2_file.parent_revision_hash, '09788da8a216799a6d611a9b06bd54162b44c5d2')
    self.assertEqual(len(test2_file.hunks), 1)
    hunk = test2_file.hunks[0]
    self.assertEqual((hunk.new_lines, hunk.new_start, hunk.old_start, hunk.old_lines),
                     (1, 1, 1, 1))
    self.assertEqual(hunk.content, "-test2\n+test3\n")
# Run the test suite when this module is executed directly.
if __name__ == "__main__":
    unittest.main()
| 44.854712 | 119 | 0.681432 | 3,927 | 34,269 | 5.796791 | 0.062643 | 0.227992 | 0.056537 | 0.043665 | 0.85622 | 0.830478 | 0.81216 | 0.766869 | 0.75839 | 0.7453 | 0 | 0.065441 | 0.20806 | 34,269 | 763 | 120 | 44.913499 | 0.773352 | 0.0431 | 0 | 0.678846 | 0 | 0.007692 | 0.126304 | 0.090392 | 0 | 0 | 0 | 0 | 0.794231 | 1 | 0.026923 | false | 0.005769 | 0.013462 | 0 | 0.048077 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
8e060c845d559d92f39c4d9cca7a75f7923ddc56 | 21,880 | py | Python | models/kd_distill.py | JiwonCocoder/fine_tuning_resnet50 | 604bc7e757109db10cdc41b5db8bebdb28499e9f | [
"MIT"
] | null | null | null | models/kd_distill.py | JiwonCocoder/fine_tuning_resnet50 | 604bc7e757109db10cdc41b5db8bebdb28499e9f | [
"MIT"
] | 1 | 2021-06-18T09:14:17.000Z | 2021-06-18T09:14:17.000Z | models/kd_distill.py | JiwonCocoder/fine_tuning_resnet50 | 604bc7e757109db10cdc41b5db8bebdb28499e9f | [
"MIT"
] | null | null | null | import pdb
import torch
import torch.nn as nn
import torch.nn.functional as F
import torchvision.models as models
from torch.cuda.amp import autocast, GradScaler
import os
import contextlib
from train_utils import AverageMeter
from .fixmatch_utils import consistency_loss, Get_Scalar
from train_utils import ce_loss
from tqdm import tqdm
from .choose_network import choose_network
from torch.autograd import Variable
class RunningAverage:
    """Maintain the running average of a scalar quantity.

    Example:
        >>> loss_avg = RunningAverage()
        >>> loss_avg.update(2)
        >>> loss_avg.update(4)
        >>> loss_avg()
        3.0
    """

    def __init__(self):
        # Count of observations and their running sum.
        self.steps = 0
        self.total = 0

    def update(self, val):
        """Fold one new observation into the running totals."""
        self.total = self.total + val
        self.steps = self.steps + 1

    def __call__(self):
        """Return the mean of all observations seen so far."""
        return self.total / float(self.steps)
def loss_fn_kd(outputs, labels, teacher_outputs, args):
    """
    Compute the knowledge-distillation (KD) loss given outputs and labels.
    "Hyperparameters": temperature (``args.temperature``) and ``args.alpha``.

    ``outputs`` are the student logits: the first ``labels.shape[0]`` rows
    belong to the labeled inputs; any rows after that belong to unlabeled
    inputs (semi-supervised call site). ``teacher_outputs`` are the teacher
    logits for the inputs being distilled.

    NOTE: the KL Divergence in PyTorch comparing the softmaxes of teacher
    and student expects the *input* tensor to be log probabilities.

    BUGFIX: the original sliced the student logits backwards — it distilled
    ``outputs[:num_lb]`` (the labeled rows) against the teacher, and applied
    cross-entropy to ``outputs[num_lb:]``, which is EMPTY in the fully-labeled
    call site (runtime error) and holds the unlabeled rows in the
    semi-supervised call site (paired with the wrong targets).

    Returns:
        Scalar tensor: alpha*T^2 * KL(student || teacher) + (1-alpha) * CE.
    """
    alpha = args.alpha
    T = args.temperature
    num_lb = labels.shape[0]
    # Rows to distill: the unlabeled tail when present, otherwise all rows
    # (fully-supervised case, where the teacher saw the labeled batch too).
    distill_logits = outputs[num_lb:] if outputs.shape[0] > num_lb else outputs
    assert distill_logits.shape[0] == teacher_outputs.shape[0]
    soft_loss = nn.KLDivLoss(reduction='batchmean')(
        F.log_softmax(distill_logits / T, dim=1),
        F.softmax(teacher_outputs / T, dim=1)) * (alpha * T * T)
    hard_loss = F.cross_entropy(outputs[:num_lb], labels) * (1. - alpha)
    return soft_loss + hard_loss
class KD_distill_unsup:
    """Supervised knowledge-distillation trainer.

    A frozen teacher network provides soft targets; the student
    (``train_model``) is optimized with ``loss_fn_kd`` on labeled batches
    from ``loader_dict['train']``. An exponential-moving-average copy of the
    student (``eval_model``) is maintained and preferred for evaluation.
    """

    def __init__(self, args, num_classes, tb_log=None, logger=None):
        """
        Args:
            args: parsed command-line options (network choice, pretraining
                source, ``num_eval_iter``, save paths, AMP flag, ...).
            num_classes: # of label classes.
            tb_log: tensorboard writer (see train_utils.py).
            logger: logger (see utils_from_git.py); ``print`` is used if None.
        """
        super(KD_distill_unsup, self).__init__()
        self.loader = {}
        self.num_classes = num_classes
        # EMA momentum for the evaluation model. (future_work: make configurable)
        self.ema_m = 0.9
        # Teacher/student/eval networks share one architecture; only the
        # student may start from pretrained weights.
        self.teacher_model = choose_network(args, args.net_from_name, args.student_net)
        self.train_model = choose_network(args, args.net_from_name, args.student_net,
                                          args.pretrained_from, args.pretrained_model_dir)
        self.eval_model = choose_network(args, args.net_from_name, args.student_net)
        self.tb_log = tb_log
        self.optimizer = None
        self.scheduler = None
        # BUGFIX: train() reads self.it and self.num_eval_iter, but this
        # class never initialized them (AttributeError on the first
        # iteration). KD_distill_semi initializes both; mirror it here.
        self.it = 0
        self.num_eval_iter = args.num_eval_iter
        self.logger = logger
        self.print_fn = print if logger is None else logger.info
        # Initialize the EMA model from the student and freeze it.
        for param_q, param_k in zip(self.train_model.parameters(), self.eval_model.parameters()):
            param_k.data.copy_(param_q.detach().data)  # initialize
            param_k.requires_grad = False  # not updated by gradient
        self.eval_model.eval()

    @torch.no_grad()
    def _eval_model_update(self):
        """Momentum (EMA) update of the evaluation model from the student."""
        for param_train, param_eval in zip(self.train_model.parameters(), self.eval_model.parameters()):
            param_eval.copy_(param_eval * self.ema_m + param_train.detach() * (1 - self.ema_m))
        # Buffers (e.g. BatchNorm running statistics) are copied verbatim.
        for buffer_train, buffer_eval in zip(self.train_model.buffers(), self.eval_model.buffers()):
            buffer_eval.copy_(buffer_train)

    def set_data_loader(self, loader_dict):
        """Register the data loaders; 'train' and 'eval' keys are used here."""
        self.loader_dict = loader_dict
        self.print_fn(f'[!] data loader keys: {self.loader_dict.keys()}')

    def set_optimizer(self, optimizer, scheduler=None):
        """Attach the optimizer and (optionally) the LR scheduler."""
        self.optimizer = optimizer
        self.scheduler = scheduler

    def train(self, args, logger=None):
        """
        Run the supervised distillation loop over ``loader_dict['train']``.

        Each step: teacher forward (no grad), student forward, ``loss_fn_kd``
        backward/step (optionally AMP-scaled), EMA update, and a periodic
        evaluation that checkpoints the best model.

        Returns:
            dict of final evaluation metrics plus 'eval/best_acc' and
            'eval/best_it'.
        """
        ngpus_per_node = torch.cuda.device_count()
        # Teacher is frozen in eval mode; only the student trains.
        self.teacher_model.cuda(args.gpu)
        self.train_model.train()
        self.teacher_model.eval()
        # CUDA events for profiling data-prefetch and compute time.
        start_batch = torch.cuda.Event(enable_timing=True)
        end_batch = torch.cuda.Event(enable_timing=True)
        start_run = torch.cuda.Event(enable_timing=True)
        end_run = torch.cuda.Event(enable_timing=True)
        start_batch.record()
        best_eval_acc, best_it = 0.0, 0
        scaler = GradScaler()
        loss_avg = RunningAverage()
        with tqdm(total=len(self.loader_dict['train'])) as t:
            end_batch.record()
            torch.cuda.synchronize()
            for x_lb, y_lb in self.loader_dict['train']:
                start_run.record()
                if args.gpu is not None:
                    x_lb, y_lb = x_lb.cuda(), y_lb.cuda()
                # convert to torch variables
                x_lb, y_lb = Variable(x_lb), Variable(y_lb)
                output_batch = self.train_model(x_lb)
                # Teacher forward pass needs no gradients.
                with torch.no_grad():
                    output_teacher_batch = self.teacher_model(x_lb)
                if args.gpu is not None:
                    output_teacher_batch = output_teacher_batch.cuda()
                total_loss = loss_fn_kd(output_batch, y_lb, output_teacher_batch, args)
                # Parameter updates, optionally with AMP loss scaling.
                if args.amp:
                    scaler.scale(total_loss).backward()
                    scaler.step(self.optimizer)
                    scaler.update()
                else:
                    total_loss.backward()
                    self.optimizer.step()
                self.scheduler.step()
                self.train_model.zero_grad()
                with torch.no_grad():
                    self._eval_model_update()
                # Update the average loss shown in the progress bar.
                loss_avg.update(total_loss.data)
                t.set_postfix(loss='{:05.3f}'.format(loss_avg()))
                t.update()
                end_run.record()
                torch.cuda.synchronize()
                # tensorboard_dict update ('prefecth' typo kept: it is an
                # externally visible key consumers may already rely on).
                tb_dict = {}
                tb_dict['train/total_loss'] = total_loss.detach()
                tb_dict['lr'] = self.optimizer.param_groups[0]['lr']
                tb_dict['train/prefecth_time'] = start_batch.elapsed_time(end_batch) / 1000.
                tb_dict['train/run_time'] = start_run.elapsed_time(end_run) / 1000.
                if self.it % self.num_eval_iter == 0:
                    eval_dict = self.evaluate(args=args)
                    tb_dict.update(eval_dict)
                    save_path = os.path.join(args.save_dir, args.save_name)
                    if tb_dict['eval/top-1-acc'] > best_eval_acc:
                        best_eval_acc = tb_dict['eval/top-1-acc']
                        best_it = self.it
                    self.print_fn(
                        f"{self.it} iteration, USE_EMA: {hasattr(self, 'eval_model')}, {tb_dict}, BEST_EVAL_ACC: {best_eval_acc}, at {best_it} iters")
                    # Only rank-0 (or non-distributed) processes checkpoint.
                    if not args.multiprocessing_distributed or \
                            (args.multiprocessing_distributed and args.rank % ngpus_per_node == 0):
                        if self.it == best_it:
                            self.save_model('model_best.pth', save_path)
                if not self.tb_log is None:
                    self.tb_log.update(tb_dict, self.it)
                self.it += 1
                del tb_dict
                start_batch.record()
                # After 2**19 iterations, evaluate less frequently.
                if self.it > 2 ** 19:
                    self.num_eval_iter = 1000
        eval_dict = self.evaluate(args=args)
        eval_dict.update({'eval/best_acc': best_eval_acc, 'eval/best_it': best_it})
        return eval_dict

    @torch.no_grad()
    def evaluate(self, eval_loader=None, args=None):
        """Return mean loss and top-1 accuracy, preferring the EMA model."""
        use_ema = hasattr(self, 'eval_model')
        eval_model = self.eval_model if use_ema else self.train_model
        eval_model.eval()
        if eval_loader is None:
            eval_loader = self.loader_dict['eval']
        total_loss = 0.0
        total_acc = 0.0
        total_num = 0.0
        for x, y in eval_loader:
            x, y = x.cuda(args.gpu), y.cuda(args.gpu)
            num_batch = x.shape[0]
            total_num += num_batch
            logits = eval_model(x)
            loss = F.cross_entropy(logits, y, reduction='mean')
            acc = torch.sum(torch.max(logits, dim=-1)[1] == y)
            # Weight the mean batch loss by batch size for an exact average.
            total_loss += loss.detach() * num_batch
            total_acc += acc.detach()
        if not use_ema:
            eval_model.train()
        return {'eval/loss': total_loss / total_num, 'eval/top-1-acc': total_acc / total_num}

    def save_model(self, save_name, save_path):
        """Write student/EMA weights, optimizer, scheduler and iteration."""
        save_filename = os.path.join(save_path, save_name)
        # Unwrap DataParallel/DistributedDataParallel wrappers if present.
        train_model = self.train_model.module if hasattr(self.train_model, 'module') else self.train_model
        eval_model = self.eval_model.module if hasattr(self.eval_model, 'module') else self.eval_model
        torch.save({'train_model': train_model.state_dict(),
                    'eval_model': eval_model.state_dict(),
                    'optimizer': self.optimizer.state_dict(),
                    'scheduler': self.scheduler.state_dict(),
                    'it': self.it}, save_filename)
        self.print_fn(f"model saved: {save_filename}")

    def load_model(self, load_path):
        """Restore any checkpoint entries that match attributes of self."""
        checkpoint = torch.load(load_path)
        # Unwrap DataParallel/DistributedDataParallel wrappers if present.
        train_model = self.train_model.module if hasattr(self.train_model, 'module') else self.train_model
        eval_model = self.eval_model.module if hasattr(self.eval_model, 'module') else self.eval_model
        for key in checkpoint.keys():
            if hasattr(self, key) and getattr(self, key) is not None:
                if 'train_model' in key:
                    train_model.load_state_dict(checkpoint[key])
                elif 'eval_model' in key:
                    eval_model.load_state_dict(checkpoint[key])
                elif key == 'it':
                    self.it = checkpoint[key]
                elif key == 'scheduler':
                    self.scheduler.load_state_dict(checkpoint[key])
                elif key == 'optimizer':
                    self.optimizer.load_state_dict(checkpoint[key])
                else:
                    getattr(self, key).load_state_dict(checkpoint[key])
                self.print_fn(f"Check Point Loading: {key} is LOADED")
            else:
                self.print_fn(f"Check Point Loading: {key} is **NOT** LOADED")
class KD_distill_semi:
def __init__(self, args, num_classes, tb_log=None, logger=None):
    """Set up teacher/student/EMA networks for semi-supervised distillation.

    Args:
        args: parsed command-line options (network choice, pretraining
            source, ``num_eval_iter``, ...).
        num_classes: # of label classes
        tb_log: tensorboard writer (see train_utils.py)
        logger: logger (see utils_from_git.py); ``print`` is used when None
    """
    super(KD_distill_semi, self).__init__()
    self.loader = {}
    self.num_classes = num_classes
    # EMA momentum for the evaluation model (future_work: make configurable)
    self.ema_m = 0.9
    # Create the networks; all three share the same architecture, but only
    # the student (train_model) may start from pretrained weights.
    self.teacher_model = choose_network(args,args.net_from_name, args.student_net)
    self.train_model = choose_network(args,args.net_from_name, args.student_net, args.pretrained_from,
                                      args.pretrained_model_dir)
    self.eval_model = choose_network(args,args.net_from_name, args.student_net)
    self.tb_log = tb_log
    self.optimizer = None
    self.scheduler = None
    self.it = 0  # global iteration counter, advanced by train()
    self.num_eval_iter = args.num_eval_iter  # evaluate every this many iterations
    self.logger = logger
    self.print_fn = print if logger is None else logger.info
    # self.pretrained_train_model = models.resnet50(pretrained=True)
    # for param_pretrained, param_model in zip(self.pretrained_train_model.parameters(), self.train_model.parameters()):
    #     param_model.data.copy_(param_pretrained.detach().data)
    # Initialize the EMA model from the student and freeze its gradients.
    for param_q, param_k in zip(self.train_model.parameters(), self.eval_model.parameters()):
        param_k.data.copy_(param_q.detach().data)  # initialize
        param_k.requires_grad = False  # not update by gradient for eval_net
    self.eval_model.eval()
@torch.no_grad()
def _eval_model_update(self):
    """
    Momentum update of evaluation model (exponential moving average).
    """
    # EMA over parameters: eval = ema_m * eval + (1 - ema_m) * train.
    for param_train, param_eval in zip(self.train_model.parameters(), self.eval_model.parameters()):
        param_eval.copy_(param_eval * self.ema_m + param_train.detach() * (1 - self.ema_m))
    # Buffers (e.g. BatchNorm running statistics) are copied verbatim.
    for buffer_train, buffer_eval in zip(self.train_model.buffers(), self.eval_model.buffers()):
        buffer_eval.copy_(buffer_train)
def set_data_loader(self, loader_dict):
    """Register the dict of data loaders; train() uses the
    'train_lb'/'train_ulb' keys and evaluate() uses 'eval'."""
    self.loader_dict = loader_dict
    self.print_fn(f'[!] data loader keys: {self.loader_dict.keys()}')
def set_optimizer(self, optimizer, scheduler=None):
    """Attach the optimizer and (optionally) the LR scheduler used by train()."""
    self.optimizer = optimizer
    self.scheduler = scheduler
def train(self, args, logger=None):
    """
    Run the semi-supervised knowledge-distillation training loop.

    Iterates jointly over the labeled ('train_lb') and unlabeled
    ('train_ulb') loaders, distills the frozen teacher's outputs into the
    student (train_model) via loss_fn_kd, maintains an EMA copy
    (eval_model) through _eval_model_update, and periodically evaluates
    and saves the best checkpoint.  Returns the final evaluation metrics
    augmented with the best accuracy/iteration observed.
    """
    ngpus_per_node = torch.cuda.device_count()

    # lb: labeled, ulb: unlabeled
    # teacher_model:freeze, train_model:training
    self.teacher_model.cuda(args.gpu)
    self.train_model.train()
    self.teacher_model.eval()

    # for gpu profiling
    start_batch = torch.cuda.Event(enable_timing=True)
    end_batch = torch.cuda.Event(enable_timing=True)
    start_run = torch.cuda.Event(enable_timing=True)
    end_run = torch.cuda.Event(enable_timing=True)

    start_batch.record()
    best_eval_acc, best_it = 0.0, 0

    scaler = GradScaler()
    # NOTE(review): amp_cm is assigned but never used below — the forward
    # pass does not run under autocast even when args.amp is set; confirm intent.
    amp_cm = autocast if args.amp else contextlib.nullcontext
    loss_avg = RunningAverage()
    end_batch.record()
    torch.cuda.synchronize()
    for (x_lb, y_lb), (x_ulb, _, _) in zip(self.loader_dict['train_lb'], self.loader_dict['train_ulb']):
        start_run.record()
        if self.it > args.num_train_iter:
            break
        end_batch.record()
        torch.cuda.synchronize()
        # NOTE(review): this second record overwrites the one at loop entry;
        # redundant but harmless for timing.
        start_run.record()

        num_lb = x_lb.shape[0]
        num_ulb = x_ulb.shape[0]
        # NOTE(review): tautological — compares x_ulb.shape[0] with itself.
        assert num_ulb == x_ulb.shape[0]

        if args.gpu is not None:
            x_lb, x_ulb = x_lb.cuda(args.gpu), x_ulb.cuda(args.gpu)
            y_lb = y_lb.cuda(args.gpu)

        # convert to torch variables
        x_lb, x_ulb, y_lb = Variable(x_lb), Variable(x_ulb), Variable(y_lb)

        # Teacher sees only unlabeled data; student sees labeled + unlabeled.
        student_input = torch.cat((x_lb, x_ulb))
        with torch.no_grad():
            output_teacher_batch = self.teacher_model(x_ulb)
        output_batch = self.train_model(student_input)
        if args.gpu is not None:
            output_teacher_batch = output_teacher_batch.cuda()
        total_loss = loss_fn_kd(output_batch, y_lb, output_teacher_batch, args)

        # parameter updates
        if args.amp:
            scaler.scale(total_loss).backward()
            scaler.step(self.optimizer)
            scaler.update()
        else:
            total_loss.backward()
            self.optimizer.step()
        self.scheduler.step()
        # Gradients are cleared after the step, so the next iteration
        # starts from zeroed grads.
        self.train_model.zero_grad()

        with torch.no_grad():
            self._eval_model_update()

        # update the average loss
        loss_avg.update(total_loss.data)

        end_run.record()
        torch.cuda.synchronize()

        # tensorboard_dict update
        tb_dict = {}
        tb_dict['train/total_loss'] = total_loss.detach()
        tb_dict['lr'] = self.optimizer.param_groups[0]['lr']
        tb_dict['train/prefecth_time'] = start_batch.elapsed_time(end_batch) / 1000.
        tb_dict['train/run_time'] = start_run.elapsed_time(end_run) / 1000.

        # Periodic evaluation / best-checkpoint tracking.
        if self.it % self.num_eval_iter == 0:
            eval_dict = self.evaluate(args=args)
            tb_dict.update(eval_dict)
            save_path = os.path.join(args.save_dir, args.save_name)
            if tb_dict['eval/top-1-acc'] > best_eval_acc:
                best_eval_acc = tb_dict['eval/top-1-acc']
                best_it = self.it
            self.print_fn(
                f"{self.it} iteration, USE_EMA: {hasattr(self, 'eval_model')}, {tb_dict}, BEST_EVAL_ACC: {best_eval_acc}, at {best_it} iters")
            # Only rank 0 (or non-distributed runs) saves and logs.
            if not args.multiprocessing_distributed or \
                    (args.multiprocessing_distributed and args.rank % ngpus_per_node == 0):
                if self.it == best_it:
                    self.save_model('model_best.pth', save_path)
                if not self.tb_log is None:
                    self.tb_log.update(tb_dict, self.it)
        self.it += 1
        del tb_dict
        start_batch.record()
        # After 2**19 iterations, evaluate less frequently.
        if self.it > 2 ** 19:
            self.num_eval_iter = 1000

    eval_dict = self.evaluate(args=args)
    eval_dict.update({'eval/best_acc': best_eval_acc, 'eval/best_it': best_it})
    return eval_dict
@torch.no_grad()
def evaluate(self, eval_loader=None, args=None):
    """Evaluate on ``eval_loader`` (default: loader_dict['eval']).

    Uses the EMA model when present, otherwise the training model, and
    returns sample-weighted mean loss and top-1 accuracy.
    """
    use_ema = hasattr(self, 'eval_model')
    model = self.eval_model if use_ema else self.train_model
    model.eval()
    if eval_loader is None:
        eval_loader = self.loader_dict['eval']
    loss_sum = 0.0
    correct_sum = 0.0
    sample_count = 0.0
    for inputs, targets in eval_loader:
        inputs, targets = inputs.cuda(args.gpu), targets.cuda(args.gpu)
        batch_size = inputs.shape[0]
        sample_count += batch_size
        logits = model(inputs)
        batch_loss = F.cross_entropy(logits, targets, reduction='mean')
        batch_correct = torch.sum(torch.max(logits, dim=-1)[1] == targets)
        # pdb.set_trace()
        loss_sum += batch_loss.detach() * batch_size
        correct_sum += batch_correct.detach()
    # Restore training mode only if we evaluated the training model itself.
    if not use_ema:
        model.train()
    return {'eval/loss': loss_sum / sample_count, 'eval/top-1-acc': correct_sum / sample_count}
def save_model(self, save_name, save_path):
    """Serialize both models, optimizer, scheduler and iteration counter.

    DataParallel wrappers are unwrapped via their ``module`` attribute so
    the stored state dicts are wrapper-free.
    """
    target = os.path.join(save_path, save_name)
    train_net = getattr(self.train_model, 'module', self.train_model)
    eval_net = getattr(self.eval_model, 'module', self.eval_model)
    checkpoint = {
        'train_model': train_net.state_dict(),
        'eval_model': eval_net.state_dict(),
        'optimizer': self.optimizer.state_dict(),
        'scheduler': self.scheduler.state_dict(),
        'it': self.it,
    }
    torch.save(checkpoint, target)
    self.print_fn(f"model saved: {target}")
def load_model(self, load_path):
    """Restore state from a checkpoint written by :meth:`save_model`.

    Only keys that match a non-None attribute of ``self`` are applied;
    everything else is reported as not loaded.
    """
    checkpoint = torch.load(load_path)
    train_net = getattr(self.train_model, 'module', self.train_model)
    eval_net = getattr(self.eval_model, 'module', self.eval_model)
    for key, value in checkpoint.items():
        # Guard clause: skip keys we have no live attribute for.
        if not (hasattr(self, key) and getattr(self, key) is not None):
            self.print_fn(f"Check Point Loading: {key} is **NOT** LOADED")
            continue
        if 'train_model' in key:
            train_net.load_state_dict(value)
        elif 'eval_model' in key:
            eval_net.load_state_dict(value)
        elif key == 'it':
            self.it = value
        elif key == 'scheduler':
            self.scheduler.load_state_dict(value)
        elif key == 'optimizer':
            self.optimizer.load_state_dict(value)
        else:
            getattr(self, key).load_state_dict(value)
        self.print_fn(f"Check Point Loading: {key} is LOADED")
if __name__ == "__main__":
    # Module is import-only; no standalone CLI behavior.
    pass
| 39.566004 | 146 | 0.60489 | 2,815 | 21,880 | 4.447957 | 0.10302 | 0.038815 | 0.033544 | 0.009584 | 0.874611 | 0.869739 | 0.856801 | 0.850891 | 0.847536 | 0.847536 | 0 | 0.007095 | 0.297806 | 21,880 | 552 | 147 | 39.637681 | 0.807863 | 0.15809 | 0 | 0.822034 | 0 | 0.00565 | 0.063421 | 0.002782 | 0 | 0 | 0 | 0 | 0.00565 | 1 | 0.056497 | false | 0.002825 | 0.039548 | 0.002825 | 0.121469 | 0.033898 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
8e132b5479a2e7e0922f627b8a4e6e33fcacaf88 | 160 | py | Python | dataprocessor/__init__.py | Khurramjaved96/Dicta | 416638a3d1ad851b00394e55a7574ec978080d51 | [
"Apache-2.0"
] | 60 | 2019-05-29T17:09:15.000Z | 2022-03-30T15:35:57.000Z | dataprocessor/__init__.py | Khurramjaved96/Dicta | 416638a3d1ad851b00394e55a7574ec978080d51 | [
"Apache-2.0"
] | 6 | 2018-06-08T14:32:34.000Z | 2019-05-20T05:34:39.000Z | dataprocessor/__init__.py | Khurramjaved96/Dicta | 416638a3d1ad851b00394e55a7574ec978080d51 | [
"Apache-2.0"
] | 28 | 2019-06-10T04:07:24.000Z | 2022-01-12T19:21:49.000Z | from dataprocessor.datasetfactory import *
from dataprocessor.dataloaders import *
from dataprocessor.loaderfactory import *
from dataprocessor.dataset import * | 40 | 42 | 0.85625 | 16 | 160 | 8.5625 | 0.4375 | 0.49635 | 0.50365 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.09375 | 160 | 4 | 43 | 40 | 0.944828 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
8e1905601c3f80b78620ffb4e8ec1c17ffd2b0c5 | 39 | py | Python | lib/cloudbusting/__init__.py | Team-Cloudbusters/kaggle-max-planck-cloud-classification | 178b2047eee738abf78c986ff0254a75c57ecef0 | [
"MIT"
] | 1 | 2019-10-14T15:44:46.000Z | 2019-10-14T15:44:46.000Z | lib/cloudbusting/__init__.py | Team-Cloudbusters/kaggle-max-planck-cloud-classification | 178b2047eee738abf78c986ff0254a75c57ecef0 | [
"MIT"
] | 5 | 2019-09-25T16:13:09.000Z | 2019-10-14T23:27:35.000Z | lib/cloudbusting/__init__.py | Team-Cloudbusters/kaggle-max-planck-cloud-classification | 178b2047eee738abf78c986ff0254a75c57ecef0 | [
"MIT"
] | 1 | 2019-09-23T19:24:49.000Z | 2019-09-23T19:24:49.000Z | from . import data
from . import tools
| 13 | 19 | 0.74359 | 6 | 39 | 4.833333 | 0.666667 | 0.689655 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.205128 | 39 | 2 | 20 | 19.5 | 0.935484 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
f3d5d7da66b56d8dcef1e0b2b6b3cf90bc0aa68d | 15,748 | py | Python | tests/test_throughput.py | czifraj2/nagios_check_paloalto | e0e207f41f5c2fb828d2bae78fbf5a52c198231c | [
"Apache-2.0"
] | 4 | 2015-04-14T13:02:48.000Z | 2015-08-09T17:39:43.000Z | tests/test_throughput.py | czifraj2/nagios_check_paloalto | e0e207f41f5c2fb828d2bae78fbf5a52c198231c | [
"Apache-2.0"
] | 1 | 2015-06-01T13:02:31.000Z | 2015-06-01T14:16:33.000Z | tests/test_throughput.py | czifraj2/nagios_check_paloalto | e0e207f41f5c2fb828d2bae78fbf5a52c198231c | [
"Apache-2.0"
] | 4 | 2020-09-18T10:26:57.000Z | 2021-12-31T12:00:56.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_check_paloalto
----------------------------------
Tests for `check_paloalto` modules.
"""
import mock
import pytest
import responses
from nagiosplugin.state import ServiceState
import check_pa.modules.throughput
import utils
class TestThroughput(object):
    # pytest suite for the throughput check: HTTP traffic is served by the
    # `responses` mock library, timestamps/byte counters are patched via
    # mock.patch, and rate state is persisted through nagiosplugin Cookie
    # files (the `statefile` fixture supplies the cookie path).
    # NOTE(review): indentation of the `with`-block nesting was reconstructed
    # during review — confirm against upstream before relying on it.

    @classmethod
    def setup_class(cls):
        """setup host and token for test of Palo Alto Firewall"""
        cls.host = 'localhost'
        cls.token = 'test'

    @responses.activate
    def test_with_three_interfaces(self, statefile):
        self.interface = 'ethernet1/1, ethernet1/2, ethernet1/3'
        interfaces = []
        for interface in self.interface.split(','):
            interfaces.append(interface)
        file1 = 'throughput1.xml'
        file2 = 'throughput2.xml'
        file3 = 'throughput3.xml'
        check = check_pa.modules.throughput.create_check(self)
        objects = []
        for resource in check.resources:
            objects.append(resource)
        with responses.RequestsMock() as rsps:
            # One mocked XML response per interface resource.
            rsps.add(responses.GET,
                     objects[0].xml_obj.build_request_url(),
                     body=utils.read_xml(file1),
                     status=200,
                     content_type='document',
                     match_querystring=True)
            rsps.add(responses.GET,
                     objects[1].xml_obj.build_request_url(),
                     body=utils.read_xml(file2),
                     status=200,
                     content_type='document',
                     match_querystring=True)
            rsps.add(responses.GET,
                     objects[2].xml_obj.build_request_url(),
                     body=utils.read_xml(file3),
                     status=200,
                     content_type='document',
                     match_querystring=True)
            from nagiosplugin import Cookie

            # Resetting cookies
            with Cookie(statefile) as cookie:
                cookie[self.host + interfaces[0] + 'i'] = 0
                cookie[self.host + interfaces[0] + 'o'] = 0
                cookie[self.host + interfaces[0] + 't'] = 1441324800
                cookie[self.host + interfaces[1] + 'i'] = 0
                cookie[self.host + interfaces[1] + 'o'] = 0
                cookie[self.host + interfaces[1] + 't'] = 1441324800
                cookie[self.host + interfaces[2] + 'i'] = 0
                cookie[self.host + interfaces[2] + 'o'] = 0
                cookie[self.host + interfaces[2] + 't'] = 1441324800

            # Check will be executed exactly one second later
            now = 1441324801
            xml_ibytes = 1000000
            xml_obytes = 1000000
            # NOTE(review): xml_ibytes is reused for the output slots of
            # interfaces 2 and 3 — harmless here since both values are equal.
            with mock.patch('check_pa.modules.throughput.get_time',
                            return_value=now):
                with mock.patch('check_pa.xml_reader.Finder.find_item',
                                side_effect=[xml_ibytes, xml_obytes,
                                             xml_ibytes, xml_ibytes,
                                             xml_ibytes, xml_ibytes]):
                    with pytest.raises(SystemExit):
                        check.main(verbose=3)

        assert check.exitcode == 0
        assert check.state == ServiceState(code=0, text='ok')
        # 3000000 Byte = 3 MByte = 24 Mbit in 1 second = 24.0 Mb/s
        assert check.summary_str == 'Input is 24.0 Mb/s - Output is 24.0 ' \
                                    'Mb/s'

    @responses.activate
    def test_with_one_interface(self, statefile):
        file1 = 'throughput1.xml'
        self.interface = 'ethernet1/1'
        interfaces = []
        for interface in self.interface.split(','):
            interfaces.append(interface)
        check = check_pa.modules.throughput.create_check(self)
        objects = []
        for res in check.resources:
            objects.append(res)
        with responses.RequestsMock() as rsps:
            rsps.add(responses.GET,
                     objects[0].xml_obj.build_request_url(),
                     body=utils.read_xml(file1),
                     status=200,
                     content_type='document',
                     match_querystring=True)
            from nagiosplugin import Cookie

            with Cookie(statefile) as cookie:
                cookie[self.host + interfaces[0] + 'i'] = 0
                cookie[self.host + interfaces[0] + 'o'] = 0
                cookie[self.host + interfaces[0] + 't'] = 1441324800

            # Check will be executed exactly ten seconds later
            now = 1441324810
            xml_ibytes = 1000000  # 1000000 Byte = 1 MByte
            xml_obytes = 1000000  # 1000000 Byte = 1 MByte
            with mock.patch('check_pa.modules.throughput.get_time',
                            return_value=now):
                with mock.patch('check_pa.xml_reader.Finder.find_item',
                                side_effect=[xml_ibytes, xml_obytes]):
                    with pytest.raises(SystemExit):
                        check.main(verbose=3)

        assert check.exitcode == 0
        assert check.state == ServiceState(code=0, text='ok')
        assert check.summary_str == 'Input is 0.8 Mb/s - Output is 0.8 ' \
                                    'Mb/s'

    def check_pa(self, time, ibytes, obytes, filename):
        # Shared helper: run one throughput check against a mocked response
        # with patched timestamp and byte counters, and return the check.
        objects = []
        file1 = filename
        check = check_pa.modules.throughput.create_check(self)
        for res in check.resources:
            objects.append(res)
        with responses.RequestsMock() as rsps:
            rsps.add(responses.GET,
                     objects[0].xml_obj.build_request_url(),
                     body=utils.read_xml(file1),
                     status=200,
                     content_type='document',
                     match_querystring=True)
            with mock.patch('check_pa.modules.throughput.get_time',
                            return_value=time):
                with mock.patch('check_pa.xml_reader.Finder.find_item',
                                side_effect=[ibytes, obytes]):
                    with pytest.raises(SystemExit):
                        check.main(verbose=3)
        return check

    @responses.activate
    def test_with_different_ips(self, statefile):
        # Two firewalls sharing one statefile must keep separate counters.
        pa_1 = self.__class__()
        pa_1.host = "192.168.0.1"
        pa_1.interface = "ethernet1/1"
        pa_2 = self.__class__()
        pa_2.host = "192.168.0.2"
        pa_2.interface = "ethernet1/1"
        from nagiosplugin import Cookie

        with Cookie(statefile) as cookie:
            cookie[pa_1.host + pa_1.interface + 'i'] = 0
            cookie[pa_1.host + pa_1.interface + 'o'] = 0
            cookie[pa_1.host + pa_1.interface + 't'] = 1441324800
        check = pa_1.check_pa(1441324800, 10, 10, "throughput1.xml")
        assert check.exitcode == 3
        assert check.state == ServiceState(code=3, text='unknown')
        assert check.summary_str == 'Difference between old timestamp and new timestamp is less or equal 0: If it is the first time you run the script, please execute it again!'
        check = pa_2.check_pa(1441324810, 110, 110, "throughput1.xml")
        assert check.exitcode == 3
        assert check.state == ServiceState(code=3, text='unknown')
        assert check.summary_str == 'Difference between old timestamp and new timestamp is less or equal 0: If it is the first time you run the script, please execute it again!'
        check = pa_1.check_pa(1441324801, 1000000, 1000000, "throughput1.xml")
        assert check.exitcode == 0
        assert check.state == ServiceState(code=0, text='ok')
        # 1000000 Byte = 1 MByte = 8 Mbit in 1 second = 8.0 Mb/s
        assert check.summary_str == 'Input is 8.0 Mb/s - Output is 8.0 ' \
                                    'Mb/s'
        check = pa_2.check_pa(1441324811, 1000000, 1000000, "throughput1.xml")
        assert check.exitcode == 0
        assert check.state == ServiceState(code=0, text='ok')
        # 1000000 Byte = 1 MByte = 8 Mbit in 1 second = 8.0 Mb/s
        assert check.summary_str == 'Input is 8.0 Mb/s - Output is 8.0 ' \
                                    'Mb/s'

    @responses.activate
    def test_new_input_less_than_old(self, statefile):
        # A shrinking input counter must yield UNKNOWN, not a negative rate.
        file1 = 'throughput1.xml'
        self.interface = 'ethernet1/1'
        interfaces = []
        for interface in self.interface.split(','):
            interfaces.append(interface)
        check = check_pa.modules.throughput.create_check(self)
        objects = []
        for res in check.resources:
            objects.append(res)
        with responses.RequestsMock() as rsps:
            rsps.add(responses.GET,
                     objects[0].xml_obj.build_request_url(),
                     body=utils.read_xml(file1),
                     status=200,
                     content_type='document',
                     match_querystring=True)
            from nagiosplugin import Cookie

            with Cookie(statefile) as cookie:
                cookie[self.host + interfaces[0] + 'i'] = 10
                cookie[self.host + interfaces[0] + 'o'] = 10
                cookie[self.host + interfaces[0] + 't'] = 1441324800

            # Check will be executed exactly ten seconds later
            now = 1441324810
            xml_ibytes = 9
            xml_obytes = 11
            with mock.patch('check_pa.modules.throughput.get_time',
                            return_value=now):
                with mock.patch('check_pa.xml_reader.Finder.find_item',
                                side_effect=[xml_ibytes, xml_obytes]):
                    with pytest.raises(SystemExit):
                        check.main(verbose=3)

        assert check.exitcode == 3
        assert check.state == ServiceState(code=3, text='unknown')
        assert check.summary_str == 'Couldn\'t get a valid value: Found throughput less then old!'

    @responses.activate
    def test_new_output_less_than_old(self, statefile):
        # Same as above for the output counter; also verifies the new
        # counters/timestamp are still persisted to the cookie.
        file1 = 'throughput1.xml'
        self.interface = 'ethernet1/1'
        interfaces = []
        for interface in self.interface.split(','):
            interfaces.append(interface)
        check = check_pa.modules.throughput.create_check(self)
        objects = []
        for res in check.resources:
            objects.append(res)
        with responses.RequestsMock() as rsps:
            rsps.add(responses.GET,
                     objects[0].xml_obj.build_request_url(),
                     body=utils.read_xml(file1),
                     status=200,
                     content_type='document',
                     match_querystring=True)
            from nagiosplugin import Cookie

            with Cookie(statefile) as cookie:
                cookie[self.host + interfaces[0] + 'i'] = 10
                cookie[self.host + interfaces[0] + 'o'] = 10
                cookie[self.host + interfaces[0] + 't'] = 1441324800

            # Check will be executed exactly ten seconds later
            now = 1441324810
            xml_ibytes = 11
            xml_obytes = 9
            with mock.patch('check_pa.modules.throughput.get_time',
                            return_value=now):
                with mock.patch('check_pa.xml_reader.Finder.find_item',
                                side_effect=[xml_ibytes, xml_obytes]):
                    with pytest.raises(SystemExit):
                        check.main(verbose=3)

            # NOTE(review): `input` and `time` shadow builtins here;
            # kept as-is to preserve the original code.
            with Cookie(statefile) as cookie:
                input = cookie.get(self.host + interfaces[0] + 'i')
                output = cookie.get(self.host + interfaces[0] + 'o')
                time = cookie.get(self.host + interfaces[0] + 't')

        assert input == xml_ibytes
        assert output == xml_obytes
        assert time == now
        assert check.exitcode == 3
        assert check.state == ServiceState(code=3, text='unknown')
        assert check.summary_str == 'Couldn\'t get a valid value: Found throughput less then old!'

    @responses.activate
    def test_same_time(self, statefile):
        # Zero elapsed time cannot produce a rate -> UNKNOWN.
        file1 = 'throughput1.xml'
        self.interface = 'ethernet1/1'
        interfaces = []
        for interface in self.interface.split(','):
            interfaces.append(interface)
        check = check_pa.modules.throughput.create_check(self)
        objects = []
        for resource in check.resources:
            objects.append(resource)
        with responses.RequestsMock() as rsps:
            rsps.add(responses.GET,
                     objects[0].xml_obj.build_request_url(),
                     body=utils.read_xml(file1),
                     status=200,
                     content_type='document',
                     match_querystring=True)
            from nagiosplugin import Cookie

            with Cookie(statefile) as cookie:
                cookie[self.host + interfaces[0] + 'i'] = 10
                cookie[self.host + interfaces[0] + 'o'] = 10
                cookie[self.host + interfaces[0] + 't'] = 1441324800

            # Check will be executed exactly at the same time
            now = 1441324800
            xml_ibytes = 11
            xml_obytes = 10
            with mock.patch('check_pa.modules.throughput.get_time',
                            return_value=now):
                with mock.patch('check_pa.xml_reader.Finder.find_item',
                                side_effect=[xml_ibytes, xml_obytes]):
                    with pytest.raises(SystemExit):
                        check.main(verbose=3)

        assert check.exitcode == 3
        assert check.state == ServiceState(code=3, text='unknown')
        assert check.summary_str == 'Difference between old timestamp ' \
                                    'and new timestamp is less or equal 0: ' \
                                    'If it is the first time you run the ' \
                                    'script, please execute it again!'

    @responses.activate
    def test_api_failed(self, statefile):
        # Empty strings from the XML reader simulate an API failure.
        file1 = 'throughput1.xml'
        self.interface = 'ethernet1/1'
        interfaces = []
        for interface in self.interface.split(','):
            interfaces.append(interface)
        check = check_pa.modules.throughput.create_check(self)
        objects = []
        for res in check.resources:
            objects.append(res)
        with responses.RequestsMock() as rsps:
            rsps.add(responses.GET,
                     objects[0].xml_obj.build_request_url(),
                     body=utils.read_xml(file1),
                     status=200,
                     content_type='document',
                     match_querystring=True)
            from nagiosplugin import Cookie

            with Cookie(statefile) as cookie:
                cookie[self.host + interfaces[0] + 'i'] = 10
                cookie[self.host + interfaces[0] + 'o'] = 10
                cookie[self.host + interfaces[0] + 't'] = 1441324800

            # Check will be executed exactly ten seconds later
            now = 1441324810
            xml_ibytes = ""
            xml_obytes = ""
            with mock.patch('check_pa.modules.throughput.get_time',
                            return_value=now):
                with mock.patch('check_pa.xml_reader.Finder.find_item',
                                side_effect=[xml_ibytes, xml_obytes]):
                    with pytest.raises(SystemExit):
                        check.main(verbose=3)

        assert check.exitcode == 3
        assert check.state == ServiceState(code=3, text='unknown')
        assert check.summary_str == 'Couldn\'t get a valid value!'
| 38.223301 | 177 | 0.539497 | 1,702 | 15,748 | 4.86604 | 0.103995 | 0.026201 | 0.058681 | 0.069548 | 0.877807 | 0.846414 | 0.80838 | 0.795339 | 0.783748 | 0.765033 | 0 | 0.053742 | 0.361951 | 15,748 | 411 | 178 | 38.316302 | 0.770502 | 0.045022 | 0 | 0.761438 | 0 | 0.006536 | 0.105442 | 0.033571 | 0 | 0 | 0 | 0 | 0.107843 | 1 | 0.029412 | false | 0 | 0.042484 | 0 | 0.078431 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f3e71dd4cab19069b8475d93d29cce4df614ca31 | 102 | py | Python | test/test_del_group.py | vanyushkina/python_training | 77c856e26a7affc25315eded7e3771174cdb8a20 | [
"Apache-2.0"
] | null | null | null | test/test_del_group.py | vanyushkina/python_training | 77c856e26a7affc25315eded7e3771174cdb8a20 | [
"Apache-2.0"
] | null | null | null | test/test_del_group.py | vanyushkina/python_training | 77c856e26a7affc25315eded7e3771174cdb8a20 | [
"Apache-2.0"
] | null | null | null | from model.group import Group
def test_delete_first_group(app):
    # Delegates deletion to the application fixture's group helper;
    # success is implied by the call not raising.
    app.group.delete_first_group()
| 14.571429 | 34 | 0.784314 | 16 | 102 | 4.6875 | 0.5625 | 0.293333 | 0.426667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.137255 | 102 | 6 | 35 | 17 | 0.852273 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
6d11f2ca095d104faa26b8a4ad7cba159b1a07ec | 43 | py | Python | tests/syntax/should_be_comprehension.py | friendly-traceback/friendly-traceback | 4f6785f14c271a4d6412ef19c140f9d380cdbcbf | [
"MIT"
] | 45 | 2021-07-06T03:30:20.000Z | 2022-03-16T17:30:58.000Z | tests/syntax/should_be_comprehension.py | friendly-traceback/friendly-traceback | 4f6785f14c271a4d6412ef19c140f9d380cdbcbf | [
"MIT"
] | 110 | 2021-06-28T11:48:46.000Z | 2022-03-25T20:41:25.000Z | tests/syntax/should_be_comprehension.py | friendly-traceback/friendly-traceback | 4f6785f14c271a4d6412ef19c140f9d380cdbcbf | [
"MIT"
] | 4 | 2021-07-05T20:56:39.000Z | 2021-11-11T20:24:34.000Z | a = [
for i in 1, 2, 3:
i**2
]
| 8.6 | 21 | 0.27907 | 9 | 43 | 1.333333 | 0.777778 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.2 | 0.534884 | 43 | 4 | 22 | 10.75 | 0.4 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
edd21394997518da0f6b110ca106a7eb52f36376 | 9,289 | py | Python | lib/systems/c165.py | pulsar-chem/BPModule | f8e64e04fdb01947708f098e833600c459c2ff0e | [
"BSD-3-Clause"
] | null | null | null | lib/systems/c165.py | pulsar-chem/BPModule | f8e64e04fdb01947708f098e833600c459c2ff0e | [
"BSD-3-Clause"
] | null | null | null | lib/systems/c165.py | pulsar-chem/BPModule | f8e64e04fdb01947708f098e833600c459c2ff0e | [
"BSD-3-Clause"
] | null | null | null | import pulsar as psr
def load_ref_system():
""" Returns c165 as found in the IQMol fragment library.
All credit to https://github.com/nutjunkie/IQmol
"""
return psr.make_system("""
C -0.00000 -0.00000 -0.00000
C 0.89844 -0.89844 0.89844
C 0.89844 0.89844 -0.89844
C -0.89844 0.89844 0.89844
C -0.89844 -0.89844 -0.89844
C 1.80322 -0.00803 1.80322
C 1.80322 1.80322 -0.00803
C 1.80322 0.00803 -1.80322
C 1.80322 -1.80322 0.00803
C -0.00803 1.80322 1.80322
C -1.80322 0.00803 1.80322
C 0.00803 -1.80322 1.80322
C -1.80322 1.80322 0.00803
C 0.00803 1.80322 -1.80322
C -1.80322 -0.00803 -1.80322
C -1.80322 -1.80322 -0.00803
C -0.00803 -1.80322 -1.80322
C 0.90592 2.70591 0.90592
C 0.90592 0.90592 2.70591
C 2.70591 0.90592 0.90592
C 2.70591 -0.90592 -0.90592
C 0.90592 -0.90592 -2.70591
C 0.90592 -2.70591 -0.90592
C -0.90592 -0.90592 2.70591
C -2.70591 -0.90592 0.90592
C -0.90592 -2.70591 0.90592
C -0.90592 2.70591 -0.90592
C -2.70591 0.90592 -0.90592
C -0.90592 0.90592 -2.70591
C 3.61872 0.00000 -0.00000
C 0.00000 -0.00000 3.61872
C -0.00000 3.61872 0.00000
C 0.00000 -3.61872 -0.00000
C -0.00000 0.00000 -3.61872
C -3.61872 -0.00000 0.00000
C 2.72364 -0.92438 2.72364
C 0.92438 -2.72364 2.72364
C 2.72364 -2.72364 0.92438
C 2.72364 2.72364 -0.92438
C 0.92438 2.72364 -2.72364
C 2.72364 0.92438 -2.72364
C -0.92438 2.72364 2.72364
C -2.72364 2.72364 0.92438
C -2.72364 0.92438 2.72364
C -2.72364 -0.92438 -2.72364
C -2.72364 -2.72364 -0.92438
C -0.92438 -2.72364 -2.72364
C 1.82914 3.63567 1.82914
C 1.82914 1.82914 3.63567
C 3.63567 1.82914 1.82914
C 1.76719 -1.76719 3.56010
C 3.56010 -1.76719 1.76719
C 1.76719 -3.56010 1.76719
C 1.76719 3.56010 -1.76719
C 3.56010 1.76719 -1.76719
C 3.63567 -1.82914 -1.82914
C 1.82914 -3.63567 -1.82914
C 1.76719 1.76719 -3.56010
C 1.82914 -1.82914 -3.63567
C -1.76719 3.56010 1.76719
C -1.76719 1.76719 3.56010
C -1.82914 -1.82914 3.63567
C -1.82914 -3.63567 1.82914
C -1.82914 3.63567 -1.82914
C -1.76719 -3.56010 -1.76719
C -1.82914 1.82914 -3.63567
C -1.76719 -1.76719 -3.56010
C -3.56010 1.76719 1.76719
C -3.63567 -1.82914 1.82914
C -3.63567 1.82914 -1.82914
C -3.56010 -1.76719 -1.76719
C 4.55134 -0.93187 0.93187
C 4.55134 0.93187 -0.93187
C 0.93187 -0.93187 4.55134
C 2.66842 2.66842 2.66842
C 0.93187 -4.55134 0.93187
C 0.93187 4.55134 -0.93187
C 2.66842 -2.66842 -2.66842
C 0.93187 0.93187 -4.55134
C -0.93187 0.93187 4.55134
C -0.93187 4.55134 0.93187
C -2.66842 -2.66842 2.66842
C -2.66842 2.66842 -2.66842
C -0.93187 -4.55134 -0.93187
C -0.93187 -0.93187 -4.55134
C -4.55134 0.93187 0.93187
C -4.55134 -0.93187 -0.93187
C 3.55677 0.07242 3.55677
C 3.55677 3.55677 0.07242
C 3.55677 -3.55677 -0.07242
C 3.55677 -0.07242 -3.55677
C 0.07242 3.55677 3.55677
C -0.07242 -3.55677 3.55677
C -0.07242 3.55677 -3.55677
C 0.07242 -3.55677 -3.55677
C -3.55677 -0.07242 3.55677
C -3.55677 3.55677 -0.07242
C -3.55677 -3.55677 0.07242
C -3.55677 0.07242 -3.55677
C 4.46429 2.67183 0.81648
C 4.46429 0.81648 2.67183
C 4.46429 -0.81648 -2.67183
C 4.46429 -2.67183 -0.81648
C 0.81648 2.67183 4.46429
C 2.67183 0.81648 4.46429
C 0.81648 4.46429 2.67183
C 2.67183 4.46429 0.81648
C 2.67183 -4.46429 -0.81648
C 0.81648 -4.46429 -2.67183
C 2.67183 -0.81648 -4.46429
C 0.81648 -2.67183 -4.46429
C -2.67183 -0.81648 4.46429
C -0.81648 -2.67183 4.46429
C -0.81648 -4.46429 2.67183
C -2.67183 -4.46429 0.81648
C -2.67183 4.46429 -0.81648
C -0.81648 4.46429 -2.67183
C -0.81648 2.67183 -4.46429
C -2.67183 0.81648 -4.46429
C -4.46429 -0.81648 2.67183
C -4.46429 -2.67183 0.81648
C -4.46429 2.67183 -0.81648
C -4.46429 0.81648 -2.67183
C 5.39574 0.07638 1.79360
C 5.39574 1.79360 0.07638
C 5.39574 -0.07638 -1.79360
C 5.39574 -1.79360 -0.07638
C 1.79360 0.07638 5.39574
C 1.79360 5.39574 0.07638
C 1.79360 -5.39574 -0.07638
C 1.79360 -0.07638 -5.39574
C 0.07638 1.79360 5.39574
C -1.79360 -0.07638 5.39574
C -0.07638 -1.79360 5.39574
C 0.07638 5.39574 1.79360
C -0.07638 -5.39574 1.79360
C -1.79360 5.39574 -0.07638
C -0.07638 5.39574 -1.79360
C -1.79360 -5.39574 0.07638
C 0.07638 -5.39574 -1.79360
C -0.07638 1.79360 -5.39574
C -1.79360 0.07638 -5.39574
C 0.07638 -1.79360 -5.39574
C -5.39574 -0.07638 1.79360
C -5.39574 1.79360 -0.07638
C -5.39574 0.07638 -1.79360
C -5.39574 -1.79360 0.07638
C 6.26923 0.87174 0.87174
C 6.26923 -0.87174 -0.87174
C 0.87174 0.87174 6.26923
C 0.87174 6.26923 0.87174
C 0.87174 -6.26923 -0.87174
C 0.87174 -0.87174 -6.26923
C -0.87174 -0.87174 6.26923
C -0.87174 -6.26923 0.87174
C -0.87174 6.26923 -0.87174
C -0.87174 0.87174 -6.26923
C -6.26923 -0.87174 0.87174
C -6.26923 0.87174 -0.87174
C 0.00000 -0.00000 6.97479
C 6.97479 0.00000 -0.00000
C 0.00000 -6.97479 -0.00000
C -0.00000 0.00000 -6.97479
C -6.97479 -0.00000 0.00000
C -0.00000 6.97479 0.00000
""")
| 53.693642 | 60 | 0.352245 | 1,184 | 9,289 | 2.76098 | 0.050676 | 0.039768 | 0.021413 | 0.036708 | 0.959009 | 0.956868 | 0.953197 | 0.953197 | 0.935148 | 0.921383 | 0 | 0.735709 | 0.564969 | 9,289 | 172 | 61 | 54.005814 | 0.073249 | 0.010873 | 0 | 0 | 0 | 0 | 0.990838 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.005917 | true | 0 | 0.005917 | 0 | 0.017751 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
eddc216544410ebc5d2ff8fdae44b57d18ce16ae | 96 | py | Python | tests/helpers/examples/operation.py | nicoddemus/dependencies | 74180e2c6098d8ad03bc53c5703bdf8dc61c3ed9 | [
"BSD-2-Clause"
] | null | null | null | tests/helpers/examples/operation.py | nicoddemus/dependencies | 74180e2c6098d8ad03bc53c5703bdf8dc61c3ed9 | [
"BSD-2-Clause"
] | null | null | null | tests/helpers/examples/operation.py | nicoddemus/dependencies | 74180e2c6098d8ad03bc53c5703bdf8dc61c3ed9 | [
"BSD-2-Clause"
] | null | null | null | class Foo:
def do(self, arg):
pass
class Bar:
    # Second stub class mirroring Foo's interface for dependency tests.
    def do(self, arg):
        # Intentional no-op; only the method's existence matters to tests.
        pass
| 10.666667 | 22 | 0.5 | 14 | 96 | 3.428571 | 0.571429 | 0.208333 | 0.375 | 0.5 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.395833 | 96 | 8 | 23 | 12 | 0.827586 | 0 | 0 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0.333333 | 0 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 8 |
b62aaa365d6de9aeb8032b128e2c9edbe664af01 | 2,462 | py | Python | tests/test_insert_task_list_item.py | xamronpc/MarkdownEditing | 9e96fa62badefddee1e71233c8aa348670662954 | [
"MIT"
] | null | null | null | tests/test_insert_task_list_item.py | xamronpc/MarkdownEditing | 9e96fa62badefddee1e71233c8aa348670662954 | [
"MIT"
] | null | null | null | tests/test_insert_task_list_item.py | xamronpc/MarkdownEditing | 9e96fa62badefddee1e71233c8aa348670662954 | [
"MIT"
] | null | null | null | from MarkdownEditing.tests import DereferrablePanelTestCase
class InsertTaskListItemTestCase(DereferrablePanelTestCase):
def setUp(self):
self.setBlockText("")
def test_insert_unaligned_task_with_asterisk(self):
self.view.settings().set("mde.list_align_text", False)
self.view.settings().set("mde.list_indent_bullets", ["*", "-", "+"])
self.setCaretTo(1, 1)
self.view.run_command("mde_insert_task_list_item")
self.assertEqualBlockText(
"""
* [ ]\x20
"""
)
def test_insert_unaligned_task_with_minus(self):
self.view.settings().set("mde.list_align_text", False)
self.view.settings().set("mde.list_indent_bullets", ["-", "*", "+"])
self.setCaretTo(1, 1)
self.view.run_command("mde_insert_task_list_item")
self.assertEqualBlockText(
"""
- [ ]\x20
"""
)
def test_insert_unaligned_task_with_plus(self):
self.view.settings().set("mde.list_align_text", False)
self.view.settings().set("mde.list_indent_bullets", ["+", "-", "*"])
self.setCaretTo(1, 1)
self.view.run_command("mde_insert_task_list_item")
self.assertEqualBlockText(
"""
+ [ ]\x20
"""
)
def test_insert_aligned_task_with_asterisk(self):
self.view.settings().set("mde.list_align_text", True)
self.view.settings().set("mde.list_indent_bullets", ["*", "-", "+"])
self.setCaretTo(1, 1)
self.view.run_command("mde_insert_task_list_item")
self.assertEqualBlockText(
"""
* [ ]\t
"""
)
def test_insert_aligned_task_with_minus(self):
self.view.settings().set("mde.list_align_text", True)
self.view.settings().set("mde.list_indent_bullets", ["-", "*", "+"])
self.setCaretTo(1, 1)
self.view.run_command("mde_insert_task_list_item")
self.assertEqualBlockText(
"""
- [ ]\t
"""
)
def test_insert_aligned_task_with_plus(self):
self.view.settings().set("mde.list_align_text", True)
self.view.settings().set("mde.list_indent_bullets", ["+", "-", "*"])
self.setCaretTo(1, 1)
self.view.run_command("mde_insert_task_list_item")
self.assertEqualBlockText(
"""
+ [ ]\t
"""
)
| 30.775 | 76 | 0.573517 | 259 | 2,462 | 5.127413 | 0.150579 | 0.108434 | 0.144578 | 0.171687 | 0.895331 | 0.895331 | 0.873494 | 0.873494 | 0.873494 | 0.873494 | 0 | 0.010056 | 0.272949 | 2,462 | 79 | 77 | 31.164557 | 0.731844 | 0 | 0 | 0.652174 | 0 | 0 | 0.189019 | 0.129613 | 0 | 0 | 0 | 0 | 0.130435 | 1 | 0.152174 | false | 0 | 0.021739 | 0 | 0.195652 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b6323e297874fac28ae5a3f47e96b5cc12aeaba8 | 266 | py | Python | tests/test_linalg/test_solvers/cases/information_ops.py | fxbriol/probnum | 7e0e94cf9146aaa2b730b02c6d75a022cd629b5c | [
"MIT"
] | null | null | null | tests/test_linalg/test_solvers/cases/information_ops.py | fxbriol/probnum | 7e0e94cf9146aaa2b730b02c6d75a022cd629b5c | [
"MIT"
] | null | null | null | tests/test_linalg/test_solvers/cases/information_ops.py | fxbriol/probnum | 7e0e94cf9146aaa2b730b02c6d75a022cd629b5c | [
"MIT"
] | null | null | null | """Test cases defined by information operators."""
from probnum.linalg.solvers import information_ops
def case_matvec():
    """Case: matrix-vector product information operator."""
    information_op = information_ops.MatVecInformationOp()
    return information_op
def case_projected_residual():
    """Case: projected-residual information operator."""
    information_op = information_ops.ProjectedResidualInformationOp()
    return information_op
| 22.166667 | 59 | 0.804511 | 28 | 266 | 7.428571 | 0.714286 | 0.201923 | 0.192308 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.116541 | 266 | 11 | 60 | 24.181818 | 0.885106 | 0.165414 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.4 | true | 0 | 0.2 | 0.4 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
b650cdcfdebfa5d13a770b26881770ad766de600 | 96 | py | Python | greenbot/web/routes/__init__.py | EMorf/greenbot | 5528fcb9246109d6742a867b9668a408d43701d6 | [
"MIT"
] | null | null | null | greenbot/web/routes/__init__.py | EMorf/greenbot | 5528fcb9246109d6742a867b9668a408d43701d6 | [
"MIT"
] | null | null | null | greenbot/web/routes/__init__.py | EMorf/greenbot | 5528fcb9246109d6742a867b9668a408d43701d6 | [
"MIT"
] | null | null | null | import greenbot.web.routes.admin
import greenbot.web.routes.api
import greenbot.web.routes.base
| 24 | 32 | 0.84375 | 15 | 96 | 5.4 | 0.466667 | 0.518519 | 0.62963 | 0.851852 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.0625 | 96 | 3 | 33 | 32 | 0.9 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
b69de3acbd06b50246b4aa1ea748cf6052824fb5 | 15,589 | py | Python | core/feature_extractor.py | jacke121/MBMD | 2daf5edb4fb40ee652baead4f9332ca00fa111a5 | [
"MIT"
] | 220 | 2018-09-17T15:42:54.000Z | 2021-09-13T13:14:22.000Z | core/feature_extractor.py | jacke121/MBMD | 2daf5edb4fb40ee652baead4f9332ca00fa111a5 | [
"MIT"
] | 12 | 2018-09-19T09:30:42.000Z | 2019-07-01T04:03:51.000Z | core/feature_extractor.py | jacke121/MBMD | 2daf5edb4fb40ee652baead4f9332ca00fa111a5 | [
"MIT"
] | 60 | 2018-09-18T00:29:50.000Z | 2021-02-22T03:55:19.000Z | from object_detection.models.ssd_mobilenet_v1_feature_extractor import SSDMobileNetV1FeatureExtractor
from object_detection.models import feature_map_generators
from nets import mobilenet_v1
import tensorflow as tf
import tensorflow.contrib.slim as slim
import collections
class MobileNetFeaturePyramidExtractor(SSDMobileNetV1FeatureExtractor):
    """SSD MobileNetV1 feature extractor augmented with a top-down pyramid.

    Selected MobileNet endpoints ('Conv2d_11_pointwise', 'Conv2d_13_pointwise')
    are merged via lateral 1x1 convolutions and nearest-neighbor upsampling
    into an FPN-style top-down feature pyramid.
    """

    def extract_features(self, preprocessed_inputs, init_extraction=False):
        """Extract features from preprocessed inputs.

        Args:
            preprocessed_inputs: a [batch, height, width, channels] float tensor
                representing a batch of images.
            init_extraction: when True, compute only a single pooled feature
                head from the 'Conv2d_13_pointwise' endpoint (presumably used
                for template/initial-frame extraction — confirm with callers);
                when False, build the full top-down feature pyramid.

        Returns:
            When init_extraction is True: a single feature tensor.
            Otherwise: the values of an ordered dict of pyramid feature maps,
            i.e. a list of tensors where the ith tensor has shape
            [batch, height_i, width_i, depth_i].
        """
        if init_extraction:
            preprocessed_inputs.get_shape().assert_has_rank(4)
            # Runtime guard: images must be at least 33x33.
            shape_assert = tf.Assert(
                tf.logical_and(tf.greater_equal(tf.shape(preprocessed_inputs)[1], 33),
                               tf.greater_equal(tf.shape(preprocessed_inputs)[2], 33)),
                ['image size must at least be 33 in both height and width.'])
            with tf.control_dependencies([shape_assert]):
                with slim.arg_scope(self._conv_hyperparams):
                    with tf.variable_scope('MobilenetV1',
                                           reuse=self._reuse_weights) as scope:
                        # Run the MobileNetV1 backbone up to its last pointwise conv.
                        _, image_features = mobilenet_v1.mobilenet_v1_base(
                            preprocessed_inputs,
                            final_endpoint='Conv2d_13_pointwise',
                            min_depth=self._min_depth,
                            depth_multiplier=self._depth_multiplier,
                            scope=scope)
                        feature_head = image_features['Conv2d_13_pointwise']
                        # NOTE(review): the scope name says "1x1_256" but this is
                        # actually a 3x3 conv producing 512 channels; the name
                        # only affects variable naming, not behavior.
                        feature_head = slim.conv2d(
                            feature_head,
                            512, [3, 3],
                            stride=1,
                            padding='SAME',
                            scope='Conv2d_Append_1x1_256'
                        )
                        # Average-pool with a 4x4 window (stride 1, VALID) to
                        # shrink the spatial extent of the head.
                        feature_head = tf.nn.avg_pool(feature_head, strides=[1, 1, 1, 1], ksize=[1, 4, 4, 1],
                                                      padding='VALID', )
                        return feature_head
        else:
            preprocessed_inputs.get_shape().assert_has_rank(4)
            # Runtime guard: images must be at least 33x33.
            shape_assert = tf.Assert(
                tf.logical_and(tf.greater_equal(tf.shape(preprocessed_inputs)[1], 33),
                               tf.greater_equal(tf.shape(preprocessed_inputs)[2], 33)),
                ['image size must at least be 33 in both height and width.'])
            # Endpoints (ordered bottom to top) seeding the top-down pyramid.
            bottomup_features_names = ['Conv2d_11_pointwise', 'Conv2d_13_pointwise']
            # No extra layers are appended on top of Conv2d_13_pointwise here.
            num_appended_layers = 0
            # appended_channel_num = [512, 256, 256, 256]
            appended_channel_num = [512]
            with tf.control_dependencies([shape_assert]):
                with slim.arg_scope(self._conv_hyperparams):
                    with tf.variable_scope('MobilenetV1',
                                           reuse=self._reuse_weights) as scope:
                        # Run the MobileNetV1 backbone up to its last pointwise conv.
                        _, image_features = mobilenet_v1.mobilenet_v1_base(
                            preprocessed_inputs,
                            final_endpoint='Conv2d_13_pointwise',
                            min_depth=self._min_depth,
                            depth_multiplier=self._depth_multiplier,
                            scope=scope)
                        topdown_features = self._topdown_feature_maps(
                            image_features,
                            bottomup_features_names=bottomup_features_names,
                            num_appended_layers=num_appended_layers,
                            appended_channel_num=appended_channel_num)
                        return topdown_features.values()

    def _topdown_feature_maps(self, image_features, bottomup_features_names, num_appended_layers=2,
                              appended_channel_num=256, stride=2, topdown_channel_num=512):
        """Build a top-down feature pyramid.

        Args:
            image_features: a dictionary of input bottom_up features with layer
                names being the keys.
            bottomup_features_names: a list of names of selected bottom_up
                features, which are combined with top down features through a
                lateral connection. The names are sorted from bottom layers to
                top layers. NOTE(review): this list is mutated in place below
                (appended layer names are added via ``+=``).
            num_appended_layers: number of layers appended to the last bottom-up
                feature. Each appended layer is a 3x3 conv2d (batch_norm/relu
                come from the surrounding arg_scope). Together with the
                selected bottom-up features they form the base of the top-down
                branch.
            appended_channel_num: number of output channels of the appended
                layers. Either a scalar or a list of length num_appended_layers.
            stride: stride of the appended layers w.r.t. their input features.
            topdown_channel_num: number of output channels used throughout the
                top-down branch (a scalar — every pyramid level has the same
                channel count). Top-down features are resized with
                nearest-neighbor interpolation to match the lateral feature and
                combined with it by element-wise addition; lateral features are
                1x1 conv2d projections of the corresponding bottom-up features.

        Returns:
            topdown_features: An ordered dictionary of the top down feature
            pyramid, keyed by layer name, ordered from bottom to top.
        """
        # NOTE(review): the length check is deliberately disabled in this class
        # (it is active in MobileNetBoxFeatureExtractor below).
        # if isinstance(appended_channel_num, list) and len(appended_channel_num) != num_appended_layers:
        #     raise RuntimeError('appened_channel_num should have the length of num_appended_layers')
        # Append extra conv layers on top of the last bottom-up feature.
        feature_head = image_features[bottomup_features_names[-1]]
        appended_features = dict()
        appended_features_names = list()
        for index in range(num_appended_layers):
            # Per-layer channel count: list entry if a list was given,
            # otherwise the shared scalar.
            if isinstance(appended_channel_num, list):
                num_channel = appended_channel_num[index]
            else:
                num_channel = appended_channel_num
            layer_name = 'Append_{}_Conv2d_3x3_{}'.format(index, num_channel)
            feature_head = slim.conv2d(
                feature_head,
                num_channel, [3, 3],
                stride=stride,
                padding='SAME',
                scope=layer_name
            )
            appended_features[layer_name] = feature_head
            appended_features_names.append(layer_name)
        # Top-down branch: appended layers join the bottom-up feature list.
        bottomup_features_names += appended_features_names
        image_features.update(appended_features)
        topdown_features = list()
        topdown_features_names = list()
        # Initialize the top-down head from the highest-level feature.
        level_ind = len(bottomup_features_names) - 1
        layer_name = 'TopDown_{}_Conv2d_3x3_{}'.format(level_ind, topdown_channel_num)
        feature_head = slim.conv2d(
            feature_head,
            topdown_channel_num, [3, 3],
            stride=1,
            padding='SAME',
            scope=layer_name
        )
        topdown_features.append(feature_head)
        topdown_features_names.append(layer_name)
        level_ind -= 1
        # Walk the remaining levels from top to bottom, fusing each lateral
        # projection into the running top-down head.
        for bottomup_feature_name in bottomup_features_names[-2::-1]:
            layer_name = 'Lateral_{}_Conv2d_1x1_{}'.format(level_ind, topdown_channel_num)
            # 1x1 projection of the bottom-up feature to the pyramid depth.
            lateral_feature = slim.conv2d(
                image_features[bottomup_feature_name],
                topdown_channel_num, [1, 1],
                padding='SAME',
                scope=layer_name)
            output_size = lateral_feature.get_shape().as_list()[1:3]
            # Upsample the head (nearest neighbor) only when the spatial sizes
            # actually differ.
            if output_size[0] != feature_head.get_shape().as_list()[1]:
                feature_head = tf.image.resize_images(feature_head, output_size,
                                                      method=tf.image.ResizeMethod.NEAREST_NEIGHBOR)
            # Smooth the (possibly upsampled) head with a 3x3 conv before the add.
            feature_head = slim.conv2d(
                feature_head,
                topdown_channel_num, [3, 3],
                padding='SAME',
                scope='TopDown_{}_Conv2d_3x3_{}'.format(level_ind, topdown_channel_num)
            )
            layer_name = 'TopDown_{}_Add_{}'.format(level_ind, topdown_channel_num)
            # Element-wise fusion of top-down and lateral paths.
            feature_head += lateral_feature
            topdown_features.append(feature_head)
            topdown_features_names.append(layer_name)
            level_ind -= 1
        # Levels were collected top-to-bottom; reverse so the returned dict is
        # ordered bottom-to-top.
        return collections.OrderedDict(
            [(x, y) for (x, y) in zip(topdown_features_names[-1::-1], topdown_features[-1::-1])])
class MobileNetBoxFeatureExtractor(SSDMobileNetV1FeatureExtractor):
    """SSD MobileNetV1 feature extractor with appended layers and a top-down
    pyramid.

    Unlike MobileNetFeaturePyramidExtractor above, this variant appends four
    extra conv layers on top of 'Conv2d_13_pointwise' and uses 256-channel
    pyramid levels.
    """

    def extract_features(self, preprocessed_inputs):
        """Extract features from preprocessed inputs.

        Args:
            preprocessed_inputs: a [batch, height, width, channels] float tensor
                representing a batch of images.

        Returns:
            feature_maps: the values of an ordered dict of pyramid feature
            maps, i.e. a list of tensors where the ith tensor has shape
            [batch, height_i, width_i, depth_i].
        """
        preprocessed_inputs.get_shape().assert_has_rank(4)
        # Runtime guard: images must be at least 33x33.
        shape_assert = tf.Assert(
            tf.logical_and(tf.greater_equal(tf.shape(preprocessed_inputs)[1], 33),
                           tf.greater_equal(tf.shape(preprocessed_inputs)[2], 33)),
            ['image size must at least be 33 in both height and width.'])
        # Endpoints (ordered bottom to top) seeding the top-down pyramid.
        bottomup_features_names = ['Conv2d_11_pointwise', 'Conv2d_13_pointwise']
        # Four extra layers with per-layer channel counts.
        num_appended_layers = 4
        appended_channel_num = [512, 256, 256, 256]
        with tf.control_dependencies([shape_assert]):
            with slim.arg_scope(self._conv_hyperparams):
                with tf.variable_scope('MobilenetV1',
                                       reuse=self._reuse_weights) as scope:
                    # Run the MobileNetV1 backbone up to its last pointwise conv.
                    _, image_features = mobilenet_v1.mobilenet_v1_base(
                        preprocessed_inputs,
                        final_endpoint='Conv2d_13_pointwise',
                        min_depth=self._min_depth,
                        depth_multiplier=self._depth_multiplier,
                        scope=scope)
                    topdown_features = self._topdown_feature_maps(
                        image_features,
                        bottomup_features_names=bottomup_features_names,
                        num_appended_layers=num_appended_layers,
                        appended_channel_num=appended_channel_num)
                    return topdown_features.values()

    def _topdown_feature_maps(self, image_features, bottomup_features_names, num_appended_layers=2,
                              appended_channel_num=256, stride=2, topdown_channel_num=256):
        """Build a top-down feature pyramid.

        Args:
            image_features: a dictionary of input bottom_up features with layer
                names being the keys.
            bottomup_features_names: a list of names of selected bottom_up
                features, which are combined with top down features through a
                lateral connection. The names are sorted from bottom layers to
                top layers. NOTE(review): this list is mutated in place below
                (appended layer names are added via ``+=``).
            num_appended_layers: number of layers appended to the last bottom-up
                feature. Each appended layer is a 3x3 conv2d (batch_norm/relu
                come from the surrounding arg_scope). Together with the
                selected bottom-up features they form the base of the top-down
                branch.
            appended_channel_num: number of output channels of the appended
                layers. Either a scalar or a list of length num_appended_layers.
            stride: stride of the appended layers w.r.t. their input features.
            topdown_channel_num: number of output channels used throughout the
                top-down branch (a scalar — every pyramid level has the same
                channel count). Top-down features are resized with
                nearest-neighbor interpolation to match the lateral feature and
                combined with it by element-wise addition; lateral features are
                1x1 conv2d projections of the corresponding bottom-up features.

        Returns:
            topdown_features: An ordered dictionary of the top down feature
            pyramid, keyed by layer name, ordered from bottom to top.

        Raises:
            RuntimeError: if appended_channel_num is a list whose length does
                not equal num_appended_layers.
        """
        if isinstance(appended_channel_num, list) and len(appended_channel_num) != num_appended_layers:
            raise RuntimeError('appened_channel_num should have the length of num_appended_layers')
        # Append extra conv layers on top of the last bottom-up feature.
        feature_head = image_features[bottomup_features_names[-1]]
        appended_features = dict()
        appended_features_names = list()
        for index in range(num_appended_layers):
            # Per-layer channel count: list entry if a list was given,
            # otherwise the shared scalar.
            if isinstance(appended_channel_num, list):
                num_channel = appended_channel_num[index]
            else:
                num_channel = appended_channel_num
            layer_name = 'Append_{}_Conv2d_3x3_{}'.format(index, num_channel)
            feature_head = slim.conv2d(
                feature_head,
                num_channel, [3, 3],
                stride=stride,
                padding='SAME',
                scope=layer_name
            )
            appended_features[layer_name] = feature_head
            appended_features_names.append(layer_name)
        # Top-down branch: appended layers join the bottom-up feature list.
        bottomup_features_names += appended_features_names
        image_features.update(appended_features)
        topdown_features = list()
        topdown_features_names = list()
        # Initialize the top-down head from the highest-level feature.
        level_ind = len(bottomup_features_names) - 1
        layer_name = 'TopDown_{}_Conv2d_3x3_{}'.format(level_ind, topdown_channel_num)
        feature_head = slim.conv2d(
            feature_head,
            topdown_channel_num, [3, 3],
            stride=1,
            padding='SAME',
            scope=layer_name
        )
        topdown_features.append(feature_head)
        topdown_features_names.append(layer_name)
        level_ind -= 1
        # Walk the remaining levels from top to bottom, fusing each lateral
        # projection into the running top-down head.
        for bottomup_feature_name in bottomup_features_names[-2::-1]:
            layer_name = 'Lateral_{}_Conv2d_1x1_{}'.format(level_ind, topdown_channel_num)
            # 1x1 projection of the bottom-up feature to the pyramid depth.
            lateral_feature = slim.conv2d(
                image_features[bottomup_feature_name],
                topdown_channel_num, [1, 1],
                padding='SAME',
                scope=layer_name)
            output_size = lateral_feature.get_shape().as_list()[1:3]
            # Upsample the head (nearest neighbor) only when the spatial sizes
            # actually differ.
            if output_size[0] != feature_head.get_shape().as_list()[1]:
                feature_head = tf.image.resize_images(feature_head, output_size,
                                                      method=tf.image.ResizeMethod.NEAREST_NEIGHBOR)
            # Smooth the (possibly upsampled) head with a 3x3 conv before the add.
            feature_head = slim.conv2d(
                feature_head,
                topdown_channel_num, [3, 3],
                padding='SAME',
                scope='TopDown_{}_Conv2d_3x3_{}'.format(level_ind, topdown_channel_num)
            )
            layer_name = 'TopDown_{}_Add_{}'.format(level_ind, topdown_channel_num)
            # Element-wise fusion of top-down and lateral paths.
            feature_head += lateral_feature
            topdown_features.append(feature_head)
            topdown_features_names.append(layer_name)
            level_ind -= 1
        # Levels were collected top-to-bottom; reverse so the returned dict is
        # ordered bottom-to-top.
        return collections.OrderedDict(
            [(x, y) for (x, y) in zip(topdown_features_names[-1::-1], topdown_features[-1::-1])])
| 53.02381 | 114 | 0.610174 | 1,732 | 15,589 | 5.191109 | 0.116051 | 0.045601 | 0.042042 | 0.018685 | 0.942276 | 0.942276 | 0.938716 | 0.913691 | 0.913691 | 0.913691 | 0 | 0.02098 | 0.324267 | 15,589 | 293 | 115 | 53.204778 | 0.832542 | 0.244724 | 0 | 0.846154 | 0 | 0 | 0.06224 | 0.018654 | 0 | 0 | 0 | 0 | 0.043269 | 1 | 0.019231 | false | 0 | 0.028846 | 0 | 0.081731 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
fca9e550b340e7aeacf10efc07a69585288d0f3d | 72,075 | py | Python | utils.py | glkc/Solar-Heat-Transfer-Simulator | 882781637c9782a71c2ae196e13185a04cecfa09 | [
"MIT"
] | null | null | null | utils.py | glkc/Solar-Heat-Transfer-Simulator | 882781637c9782a71c2ae196e13185a04cecfa09 | [
"MIT"
] | null | null | null | utils.py | glkc/Solar-Heat-Transfer-Simulator | 882781637c9782a71c2ae196e13185a04cecfa09 | [
"MIT"
] | null | null | null | import argparse
class ChoiceRange(object):
    """An inclusive numeric interval usable as an argparse ``choices`` value.

    argparse validates a value with ``value in choices`` and renders choices
    by iterating them, so one instance can represent the whole closed range
    [min, max].
    """

    def __init__(self, mn, mx, name):
        self.min = mn
        self.max = mx
        self.name = name

    def __eq__(self, other):
        # A value "equals" the range when it lies inside the closed interval.
        return self.min <= other and other <= self.max

    def __contains__(self, item):
        # Membership (used by argparse) is the same interval test.
        return self == item

    def __str__(self):
        return '{0} [{1},{2}]'.format(self.name, self.min, self.max)

    def __iter__(self):
        # argparse iterates over choices for help/error text; present the
        # whole range as a single entry.
        yield self
def get_user_input(argv=None):
    """Parse command-line options for the solar water heater simulation.

    Args:
        argv: optional list of argument strings to parse. Defaults to None,
            in which case argparse reads ``sys.argv[1:]`` — identical to the
            previous behaviour. Passing an explicit list (e.g. ``[]``) makes
            the function testable without touching process arguments.

    Returns:
        argparse.Namespace holding all simulation parameters with their
        defaults applied.
    """
    parser = argparse.ArgumentParser(description="User Input for the Simulation of Solar water heater")
    # Collector (solar panel) parameters.
    parser.add_argument("-ca", "--collector_area_sqm", help="Area of the Solar Panel in Sq.Mt.", type=float, default=5)
    parser.add_argument("-ce", "--collector_efficiency", help="Efficiency of the Solar Panel", type=float, default=0.9,
                        choices=ChoiceRange(0., 1., "COLLECTOR_EFFICIENCY"))
    # Transmission and pump parameters.
    parser.add_argument("-tf", "--transmission_friction_m", help="Friction of the pipe in Mt.", type=float, default=4)
    parser.add_argument("-pp", "--pump_power_hp", help="Power of pump in HP", type=float, default=2)
    parser.add_argument("-pe", "--pump_efficiency", help="Efficiency of the Pump", type=float, default=0.8,
                        choices=ChoiceRange(0., 1., "PUMP_EFFICIENCY"))
    # Storage tank geometry and losses.
    parser.add_argument("-tv", "--tank_vol_l", help="Volume of Tank in Lit", type=float, default=25)
    parser.add_argument("-ti", "--tank_in_flow_height_m", help="Inflow height for tank in Mt", type=float, default=1.8)
    parser.add_argument("-to", "--tank_out_flow_height_m", help="Outflow height for tank in Mt", type=float, default=.8)
    parser.add_argument("-tl", "--tank_heat_loss_factor", help="Heat Loss Per Second factor for tank", type=float,
                        choices=ChoiceRange(0., 1., "TANK_HEAT_LOSS_FACTOR"), default=0.001)
    # Simulation initial condition and duration.
    parser.add_argument("-it", "--init_temp_k", help="Initial temp in Kelvin", type=float, default=300)
    parser.add_argument("-s", "--steps", help="Number of steps to run the simulation", type=int, default=3600)
    # parse_args(None) falls back to sys.argv[1:], preserving the old API.
    return parser.parse_args(argv)
def convert_mt_to_ft(val):
    """Convert a length given in meters to feet."""
    feet_per_meter = 3.28084
    return val * feet_per_meter
def get_radiation_data():
# TODO: use weather TMY file?
# using simulated solar radiation data
data = [300, 299.84289235676545, 298.52245326564173, 299.4582771432361, 299.24461272948366, 299.54535554619923, 299.9163184538746, 298.59725638062434, 300.02510618318735, 300.54936761320914, 301.3582759097539, 301.09882857451805, 300.31608036044906, 299.61299941306925, 298.917665235649, 298.84146411059726, 299.92235399510895, 301.2847439959279, 301.8201407532942, 302.2022884666113, 303.00426891116643, 303.28712358910235, 303.5159184861406, 303.6533522681597, 304.84933704790876, 305.49006458370627, 304.1301069171443, 302.96833616700906, 302.09511792184776, 301.4661509689152, 302.5801814667041, 303.06615406160927, 301.8704407146108, 300.99847497401555, 300.9223142646688, 300.6916670977684, 299.4427408069982, 300.0091743554491, 299.7141200405183, 300.7954745499752, 299.51356521029663, 300.791998385492, 301.8757420801607, 301.47533224419607, 300.43331074165485, 299.7537334041794, 298.5954673803252, 297.6523735878969, 298.35481611946716, 299.2342322890987, 298.5696395999951, 297.2586409471172, 296.93840396194724, 295.8896625058037, 296.5999384614821, 297.0214842943716, 297.6391390845424, 297.98124974450434, 297.07435571158027, 296.47347840729475, 295.71619866601344, 295.156641506598, 294.86767650337464, 293.71212719446754, 292.6372017310606, 292.73226314224496, 291.4747797748347, 290.70688652481715, 291.79730071440576, 291.5275781697959, 292.03556106556135, 291.9184335273873, 291.3119283338212, 291.9584814034567, 291.76738688919465, 290.52732001190844, 290.79640359970364, 289.54005957116675, 289.4778065759261, 288.4555935260912, 288.34697358200515, 286.9202648665483, 286.48586840531715, 285.9118947412858, 284.66093239501527, 284.7300885881771, 283.3196249168869, 282.7844348494845, 283.0608390415158, 282.43868247367675, 281.055526510095, 279.688091377984, 280.9799355305604, 282.13510343710783, 283.05046091824056, 281.97459050959367, 281.78535515211587, 283.11473829610156, 284.5162558003115, 283.1913856753812, 282.4101014858249, 281.70565547778284, 280.9350116391714, 
280.8106969572813, 279.67355335984814, 279.5290521371215, 279.394007435663, 280.7335677919887, 280.34314603190643, 281.1130962627105, 280.3306364588855, 279.1967629210952, 280.2510426475426, 281.61543690624933, 281.5495016009274, 282.6527119807254, 281.7963510889265, 280.57264377986246, 281.24539096411416, 282.421830869941, 282.59041225334283, 281.3820337195917, 281.6430738755703, 281.897446468379, 281.5574134391711, 282.9340751904603, 282.8497634639015, 281.81578403919923, 282.25317664982424, 283.57370690216936, 284.30916811424845, 284.33105252165075, 285.3332581526934, 286.17488242593686, 286.4770506220786, 286.82154340839804, 287.8368888718111, 286.4586554788982, 287.4272989363356, 288.0483801736338, 288.1821433732246, 287.86954013524945, 289.1161538529868, 289.50953663381483, 289.3179746808704, 289.1217710101183, 289.65981808176025, 291.06483973480124, 290.77095189120587, 289.61484963984543, 288.3433920336694, 289.76090553836406, 289.14929024753957, 290.5350251636149, 289.0944916565767, 289.1164472909999, 288.21507992791595, 288.8948132985006, 289.42669925209015, 288.41898634230375, 287.3532955625985, 287.2508478646658, 286.3932562221918, 286.5480208647642, 285.1230341586393, 286.53475117319107, 287.40900904701954, 286.3640581905162, 287.3194399788714, 288.66783509967195, 287.82798523498815, 289.11735139732724, 289.5917607932917, 289.41003065076507, 289.99944983293176, 289.0153026821541, 289.8503639588492, 290.9230242644406, 289.71471160527216, 288.5149459112925, 288.7198728743488, 289.07920375742026, 290.0795994029386, 290.51526583098007, 291.196453380844, 290.66563386742365, 290.88960438254713, 291.37510052010913, 290.812206752741, 290.9727217331957, 291.36052869166906, 290.81442119974537, 290.3905458506784, 289.24724417997777, 288.7546079280493, 287.805578396821, 287.9170713228463, 288.6199497646049, 288.55700804253985, 288.1745262072203, 287.7823870369176, 287.4467599565678, 286.45508379632247, 286.8982846870047, 287.6899650968539, 286.7493078192803, 
285.4965430155856, 286.6963256274183, 287.85151483973397, 286.78775499096685, 287.68601202001685, 287.39029413775796, 288.15408081892014, 288.2376600327541, 288.5566388317308, 288.4177582837034, 288.89123618505914, 290.07428625012255, 291.22806899079006, 289.967935335724, 288.5800449914869, 288.3059857312162, 287.65091963340876, 288.6435809864359, 287.6909018972035, 286.95196598529066, 286.86947497074266, 285.71432516951535, 285.25608767330124, 284.1323880835289, 284.1182230541253, 285.30300794556746, 286.2026713031265, 287.06198027039756, 287.6053682040863, 288.0462707356294, 288.274679082808, 288.8966481463883, 289.4344044076717, 290.0543391363759, 289.4130467458009, 289.90689898947517, 289.4804481835815, 290.63521879886844, 291.87397781543666, 292.934648392839, 291.89550938375965, 291.66492872569967, 291.4297802733046, 292.8649734289617, 294.11843696407243, 295.4066910609406, 293.93689325991227, 292.93075400461487, 292.26227818397484, 293.23266344415526, 293.6211187928841, 293.848916276729, 293.64117006931025, 293.8791793407476, 295.1319613413667, 294.841337289792, 294.7548266819648, 296.0247635092236, 296.6515133657537, 296.28995613039655, 296.0061063619694, 296.4979383564176, 295.4163118504455, 296.8357974208669, 295.9171672415986, 294.89221410194085, 295.79980281779103, 296.56906138996766, 297.1523278699372, 297.43964918203255, 297.55558138885834, 297.7086564903044, 298.6138986440381, 299.93918226097804, 299.9225647178487, 299.4513050870111, 299.7015986699142, 299.4387907666162, 300.6760276610796, 300.48208223913036, 301.8796420564265, 302.2581786676439, 301.7527347039134, 301.41993111388365, 302.82944887595613, 303.8523160233168, 303.9901238287382, 304.9607220319524, 304.58839040888387, 303.72363751040757, 302.2800577468899, 300.87089180421947, 301.9904171186038, 301.3521040009079, 300.94056019518706, 300.87888291001826, 299.4946702692957, 298.63806629898994, 299.2445480391113, 298.46815023769597, 297.87115181879403, 296.884742672085, 296.96456053480904, 
297.17059903724464, 296.969287430098, 297.86282623731034, 298.52219937087375, 299.91417534160666, 300.4754194219632, 301.5287900589988, 302.9956902332182, 303.12318348214114, 302.67248258356415, 302.0230841410161, 301.50828565310786, 300.3392395891375, 299.63419681907044, 299.78814708582667, 300.5851462438388, 299.407846729249, 299.9312298494246, 299.0684188316923, 300.378864215719, 301.1976546295586, 302.29629494990087, 303.44001246022094, 303.1752706483158, 302.74663495596025, 301.5420197145748, 301.06806075856167, 300.71634762692327, 301.68157329083095, 301.142787956687, 300.0295964511395, 300.6702126639586, 299.5977642652644, 299.53540022464, 300.4355471371539, 300.8865881517585, 300.086466661018, 299.83917232152584, 301.02156786658276, 302.26427920796704, 303.6223154309423, 304.11265308371907, 303.8875221311196, 303.0376629915866, 303.08428144480314, 303.4495393659662, 304.1098005732039, 304.8640107162058, 303.4063349870938, 302.3615874599992, 302.2430588416724, 303.0898936751043, 303.66904671173074, 304.0586386053395, 303.19335999760165, 304.1913215183098, 305.12482053408496, 304.17320982892016, 303.7517143066333, 305.2090577623285, 305.7480009754076, 304.63112418186284, 304.0213382017678, 304.20876815243565, 303.48423379691786, 303.9148281061128, 303.2459650624587, 304.33346955530646, 305.5382861515938, 306.09667687844785, 304.6398440193516, 304.06868501260885, 303.7697123676733, 302.8921612215508, 302.27503536041723, 303.75217068918323, 304.63139455543904, 303.9108574532834, 303.6248821986557, 304.723454399126, 303.4125727654435, 303.7400883358457, 302.25555471583465, 302.1806202507936, 302.292272555082, 301.2026095786017, 302.63092396221055, 303.49348354253, 303.20984617723843, 303.5047584632282, 302.31136247605866, 303.7973558506112, 304.38889165041, 304.08179928398346, 304.75141144377505, 304.3391412948448, 303.1294277694171, 303.37748371474186, 303.5912863177261, 302.09507925955205, 302.4024746743596, 301.0339747756527, 300.8453988034536, 
299.93046355241654, 299.3051204490723, 298.8031858182166, 300.0909884069231, 301.10028126694743, 300.99529044430363, 300.513160241321, 300.2463251895916, 301.50871236936433, 300.7115494423038, 299.9267299672857, 298.42968242318955, 298.53091700369237, 299.4959641278469, 299.140557056915, 299.28720328318025, 299.5192767177534, 298.25104779781486, 299.167921203779, 299.934624860136, 300.14854176906533, 300.64366518606715, 299.2400272089142, 300.5251386042595, 299.0874575269459, 298.5222488121814, 298.73942061150086, 297.9230409578675, 298.54461352918344, 298.54624712859993, 297.10773073452617, 296.86933847414844, 296.71917935262053, 296.544767031751, 295.76406303904935, 294.63828695271303, 296.04850776946165, 295.53105244824883, 295.84208403084557, 296.0328150993893, 297.03733796713095, 298.2343155814009, 297.6321961870936, 298.68120339244285, 299.8441288912406, 298.5407791873936, 299.87509336596435, 298.9356695712883, 299.30676003034995, 298.9990464628468, 299.35897021160076, 299.60294987098655, 298.1144667847239, 296.8505803231645, 296.4331853802997, 296.2984532315518, 295.22792397509085, 294.6642188051121, 293.68726763155115, 292.2619685554659, 290.85794359859955, 291.9862481594885, 293.1689575735158, 294.39117876522465, 294.7789758314702, 294.9386676059024, 293.83597799028234, 293.8652319415298, 293.2099830002928, 294.22872143965475, 294.85362493711233, 293.492217080123, 292.72330585294804, 292.49235552544496, 292.74627902554, 293.5517861902072, 293.61278594938415, 293.7658914853127, 293.41230008855507, 294.5021126194921, 294.9153870626655, 294.5115049218314, 294.83467294291603, 293.49879956463815, 293.97152894353474, 294.25823224322045, 293.48373245872045, 293.37925871210166, 292.9240047226764, 292.0553331347636, 291.3194289144555, 290.44072088012217, 291.0522955275841, 291.97183463516, 292.7209716388495, 294.1842369811725, 294.6308373370204, 294.84952645224854, 295.7568856533646, 294.5714270098545, 295.21362305988504, 294.365314533415, 293.07269727424875, 
294.1343245212384, 293.321610260268, 293.5850541180978, 293.8237767627692, 292.4463817981013, 293.1151834646538, 293.3765727960843, 293.1744640074299, 291.73660041486886, 290.88749023912146, 290.5827395542084, 290.3133241386397, 291.27900368762914, 291.2849658126814, 290.13047478255305, 290.83383777926474, 292.11332917862387, 293.57063252971915, 293.0966616373966, 294.3924234143183, 295.3028441257362, 294.95851047297657, 296.3766109650743, 297.57510356720786, 297.57865535326795, 298.7624507905501, 298.5941116020915, 299.0168734122178, 299.2078147063599, 299.94868288292196, 300.33629099623585, 301.7300883158806, 301.5216817382999, 301.31393457556914, 301.8388776843869, 301.49121485595776, 301.5457077259684, 300.6393642963678, 300.1751554449626, 301.0754141148494, 301.04060867452745, 301.5774894534351, 302.05733129634064, 303.4365557042292, 302.3599118539894, 301.2654912696604, 302.1973095491205, 302.2333635541277, 303.18779396739416, 301.8423673202153, 302.7151945774767, 302.7689609806036, 302.75089256138665, 302.26366571542974, 301.1101171384833, 302.52184321004546, 303.9829344112587, 302.85323413203196, 304.06253458231794, 304.27370965160964, 305.7492440328118, 304.9696930178011, 306.06836880087707, 305.04434338361045, 305.8432993900556, 305.2712869194136, 305.50231142847724, 305.3701306252807, 305.09658538807037, 306.1467063638907, 306.6286135956706, 305.1860134831956, 305.5712235254719, 306.57660173643757, 305.61108536886456, 304.401735477276, 305.36163618400343, 305.63862551994293, 306.88562697681095, 306.2215419761809, 306.7912378748646, 307.4621496915516, 306.786620186062, 308.2687045682594, 308.5501457348568, 309.79029592322814, 309.85760431373745, 310.98740395602135, 311.8870335564697, 313.3913865018435, 314.8848696692917, 315.79844421511893, 314.6213152279806, 316.06556078724844, 316.09492950820794, 315.6475659447468, 314.5652923728998, 315.43639978968207, 315.903039186336, 315.8660304806234, 317.0433011079146, 315.7037251896496, 314.967474799255, 
315.7602263517692, 315.9289209847108, 316.97659961438404, 316.2803541388236, 314.9618091451797, 314.03670492472526, 313.9694434110079, 313.76414450150855, 313.22823152685567, 314.2661071407775, 313.2616073111021, 312.64334961912806, 311.9030087350428, 313.2374601625062, 313.92231514862027, 314.79291233301916, 313.8593602794687, 313.6246101603336, 314.60332320342553, 313.3286678873218, 312.343587326228, 312.586740604378, 311.837383460401, 312.69672028895513, 313.2499040235583, 313.0964226822832, 313.0176253081175, 312.5810228020068, 314.1398905733753, 313.9472925009782, 314.93737859635314, 314.3060459787772, 313.5676418216997, 312.0992181639723, 313.330965519969, 313.0633313722192, 314.2096267736169, 315.76396229502376, 315.3291238537962, 316.56392129254124, 317.0252866504178, 316.71934923283595, 317.32353655672114, 318.61770767606635, 318.5092117218268, 319.10565930322605, 319.6918137066929, 319.4929497741563, 318.932567987226, 319.79208697887765, 320.44488333178487, 319.84419390709786, 319.9050551265749, 320.9437014691164, 320.78525681163364, 322.1410353350267, 321.43130023152156, 320.6113091135455, 320.97165061375614, 321.334192235049, 321.70680324405345, 320.7770046395013, 321.61185322236514, 321.18813068268435, 319.7132223840729, 318.50978543011826, 319.12716729308613, 319.02053853458756, 318.3653314436798, 318.0344540468125, 317.30368941935836, 317.19749628880635, 318.7012847256714, 318.88274694374314, 318.8185700713004, 318.19992037905155, 319.2089862334349, 319.6426553833751, 320.80912419727986, 319.6608775693325, 318.14270997074215, 319.601978396945, 320.8224251100305, 319.7043794800187, 321.24118187167664, 320.656442189174, 319.5773431021158, 319.1908535664986, 319.8367517778165, 318.52837103095624, 318.8670822395131, 318.2717948643699, 319.3486084097881, 319.2143041314824, 319.42935136102767, 319.4218765231437, 320.58072042298375, 320.88027822298756, 321.14440464280324, 319.84375341422816, 320.21360417285206, 320.85118154052606, 322.25734140664946, 
323.00536440688654, 321.89481289645624, 321.45685561895135, 320.5794174169272, 319.6846992761063, 320.0442024524736, 320.65917112808665, 319.27247052563837, 318.9303996635938, 317.72242850808465, 318.4544331573415, 319.65797444328246, 319.5721919303315, 321.0037070694533, 322.0839818089203, 323.46196344268895, 324.21680281084593, 324.6074465476051, 323.36509133557394, 323.68525213979456, 325.0863268832833, 326.4199910725588, 326.70500168350253, 325.28005012902184, 324.0221930094063, 324.4854125108187, 324.0413222939053, 325.5950612531933, 325.34293332632205, 326.12572627450544, 324.53350917946966, 325.1283387335482, 326.0893950607769, 324.9644216122013, 324.77787241728794, 324.1268678094363, 324.4913638319528, 324.39656272600945, 322.85911547585397, 324.2991465309857, 324.9664389038594, 323.40360968658445, 324.3476832037077, 323.5198631773759, 322.7342500333215, 323.4549768498574, 323.41635911035127, 324.8934048260348, 325.6214776922058, 326.41789628147995, 327.50983560936726, 328.10851393474627, 329.0358236779502, 327.8948346830247, 327.47658260650354, 326.7993825247211, 327.25744419979503, 328.1607373303501, 326.6344308417069, 326.18039887196295, 325.78703150276294, 325.29985017942505, 325.76726687537354, 324.73341727526497, 324.2078049256587, 323.9193448892378, 325.0248218405916, 325.8366014815222, 324.7175536730474, 323.69755271079924, 323.0263174307538, 324.59402056354975, 325.1271211410867, 326.3536365480743, 325.1173201243916, 326.0050583096324, 327.1212820310019, 328.48634222476795, 327.7827448456944, 329.0675350445668, 330.4566204640226, 331.40253662217464, 331.16417643774344, 331.7213169297534, 331.43225411471605, 331.6103400246346, 332.32470602801686, 332.5103828841658, 333.06388372657335, 333.9857462944113, 335.4830441516888, 336.6907399829561, 335.8212490815033, 336.9257688907725, 335.41900632731324, 334.08141209968954, 333.29886340275266, 331.95542761034545, 333.15405850948014, 334.12418189608155, 333.59656820160325, 333.8160267654169, 
332.48655053070826, 332.10617002557694, 332.4129029676616, 332.3009237404429, 332.482312617568, 331.419658983511, 332.1243333097251, 333.7651952576746, 334.1196529046979, 332.5649506086024, 331.727459374573, 332.87863322452927, 332.0344715726758, 331.20668690599894, 330.5405272866091, 330.34656349460045, 329.7685367145059, 330.4975019654073, 330.9597091855443, 332.52438773846416, 333.91274347967413, 332.9489135509797, 332.72928069733007, 332.1310915625112, 332.6714896357845, 332.58567602546316, 333.32463763677583, 333.37863821759635, 334.7464255126809, 333.21066994917004, 332.23884266374967, 333.1216224397659, 331.5173330331011, 332.84545557053076, 331.97507804046836, 331.71991970461005, 330.2444208390744, 330.58229395585937, 331.4461042453078, 330.54491566855955, 329.033185968164, 327.9213290552876, 326.64734560789236, 325.4441063663775, 323.8708715664347, 324.74130929222, 323.70568834992133, 323.6262484226118, 324.3905627181655, 325.67417692440984, 325.8829846040407, 326.4595129232989, 325.74712106837194, 326.25191716692166, 324.6869505181442, 325.61246009016327, 325.47568305513016, 326.27730878605826, 327.35518461192464, 328.0053067587798, 327.665457969013, 328.36804438194326, 328.6794477340208, 327.8018739279601, 328.38396213631785, 328.0152090183657, 327.32238436043923, 326.9176819969351, 327.64194489327156, 327.1424307832996, 326.1937490166988, 327.70243615452705, 327.50722961466386, 326.6612247138968, 326.5084734751606, 326.98285700910253, 325.84731635411646, 325.73664906328304, 325.2327313147852, 325.9600632626112, 324.5062204431792, 323.08943439835326, 323.20537571010607, 321.6786049458243, 323.06610060671915, 324.4477526952759, 325.0687375048184, 326.07431499600136, 326.9174884463597, 327.3453349230697, 328.86538437889953, 328.2493022181212, 329.8468487930506, 331.40865546956064, 331.8999476801025, 332.05626267696016, 331.0483669115724, 332.1333904687743, 331.15112222459754, 331.25006566926675, 330.2647593446926, 330.75695871226947, 330.42469870814784, 
328.79618966076964, 329.8361696774454, 329.2300683533922, 328.9097223305103, 329.5327185868236, 330.0176714848731, 331.32987705219347, 332.0465239801096, 333.70287943011675, 334.23267349679855, 334.9186852077969, 333.8144583017836, 332.397335233427, 330.85679131437814, 329.346969955947, 330.64904632526645, 329.0898106318237, 327.68044876464694, 326.7116346183714, 327.12081385209876, 328.08281020061094, 329.53561063155996, 329.122217034953, 328.5072747815649, 328.7710265151374, 327.55218601564115, 327.5581957866742, 326.5626504748803, 325.2062657122668, 323.8325313179415, 324.3959541577849, 325.42005407331555, 325.7111044980853, 326.6962913508419, 325.71646760943406, 327.24426011512264, 326.559429711074, 328.0300058595046, 328.389025397498, 329.6408149797118, 329.92174767243984, 330.39167895827336, 329.7025622404656, 330.02213546002537, 330.55193874920104, 330.063998379982, 328.6054515039194, 329.19034895754424, 329.6143274201888, 331.0943108534058, 330.7701886866705, 330.4364517028161, 331.2748290056164, 331.83604211466513, 331.9105302833452, 331.281894532904, 331.5977546636299, 330.4383872724093, 331.27626346387893, 331.5418781219446, 332.4106718549469, 334.0234934002779, 335.6083204336599, 337.17745879557197, 336.17233046173556, 336.94861884868976, 335.87242336668413, 335.14039600834076, 336.5719301882011, 337.5661109703707, 337.40459010192626, 337.97025915651363, 337.9969752289567, 338.3738264189603, 338.1669882907116, 337.66308847656336, 336.97214835002075, 338.5250089720594, 337.6374638475146, 336.98145035926154, 336.84135024366975, 335.18258446881424, 335.3412430184373, 334.74723402957153, 333.7087996948726, 334.5370991225753, 335.44313059292915, 335.41507181564737, 335.90598015376827, 337.17472479870656, 338.30438705660885, 339.9465943172606, 339.9800944606839, 338.3488106436121, 337.301815397323, 338.11260394611764, 339.09533780729413, 339.05735288471647, 337.9467827828118, 337.9664441859488, 337.09662711302644, 335.8472188134368, 334.4424648697312, 
333.49835182451636, 333.480337294992, 333.4165695488827, 334.02530139136013, 335.1737266111061, 335.0668239484321, 335.7823195435598, 335.1760167662679, 335.1890341333505, 336.50606525871683, 335.35276927194593, 334.65620619313904, 333.27304966434997, 331.81256566074006, 332.60888840145356, 334.0500436378297, 333.60660414491565, 333.0378539957009, 333.87166513357573, 334.5949620258408, 333.246341857736, 331.9561220264204, 332.41145060596176, 332.27537449158916, 333.0096832976979, 333.86246953398904, 332.5585947967719, 331.1292615517857, 330.1757304904286, 330.2338128346705, 331.65292489108026, 332.04990714719446, 330.40779860723, 331.3222591280169, 330.00453429733227, 329.91644447496697, 328.98169870057916, 329.29126424634217, 329.5629181198644, 330.29242315895027, 331.2026515318634, 332.0514875151235, 330.67081814589466, 331.192002746853, 330.280032576777, 331.30958943349066, 332.23265481883374, 330.59936124918636, 330.5211935954283, 329.01715458982113, 330.37623037621285, 329.4305042680838, 329.984425775032, 329.6287168219291, 330.7119832386251, 330.103684842425, 330.1231808602139, 330.65366257107814, 331.8462461561594, 330.8092761057007, 332.3463516565536, 331.54696433644403, 332.3272916074533, 332.77155858230327, 334.1426998796128, 334.06825010521925, 332.8158258701106, 332.73212543690994, 332.0434362224694, 330.91045172266945, 329.26499385290305, 330.22633504483184, 330.0559471063116, 329.80345300324433, 330.1718796733264, 331.79589150099787, 332.9563349756329, 331.54457344082886, 331.3241780294972, 330.9456828319151, 330.95945755574866, 330.03721478661726, 331.55276078856843, 332.3655697874925, 333.81251333734946, 334.2336417056774, 333.42553661223076, 333.79403158574524, 334.6544505696296, 333.5296815847255, 332.3290613851134, 331.2070077451916, 330.08484228074843, 328.9534708012348, 327.32220084566364, 326.8890840720769, 325.39291438163383, 325.2487422189259, 324.01504159066127, 324.4091333930816, 325.9138046879798, 326.92648115322066, 327.9076849209117, 
328.54428992146586, 327.4539319976077, 327.9654700470911, 329.4924156018477, 329.9694190510507, 328.82405186402616, 329.829474799537, 328.25385152597585, 329.39588740611504, 329.89661144321616, 330.20167071861096, 328.9382669096488, 329.8103689221721, 328.5821838277184, 327.79485161762733, 327.8165342676284, 328.79327591087065, 327.8247157826878, 326.2697305696797, 325.78679611226266, 325.4469984789416, 325.2336092410812, 325.9783258624521, 324.9096094384259, 323.29277757919334, 324.8523275409751, 324.45468377324397, 324.54628339890064, 324.1822327768368, 323.82876654066433, 322.3653372325489, 323.76625587697487, 325.1704274430401, 324.6089904713875, 323.52092676638887, 322.2313968681254, 321.65471778666586, 321.0939902552704, 321.72119754829794, 320.1734904286342, 321.6194031257625, 323.1054312889598, 324.201704020413, 323.4770873060801, 322.00584690702766, 320.88838167812384, 321.62765753454715, 320.3974761200888, 319.1749235503933, 318.6523834137411, 317.91076847393884, 317.7417190778751, 316.6417245007696, 315.09329026000023, 315.6150433902338, 314.99737644674764, 315.2912425001317, 313.7433706352239, 314.74659653644915, 315.3763270705256, 316.2001457317075, 316.6702652920711, 317.97474750040794, 317.7391726428334, 317.2218615456327, 318.04773314829504, 319.4853504533721, 320.086899625251, 320.9408902149441, 320.7796029631804, 321.6300708270355, 320.73083987897616, 319.5490176542962, 320.81783964242925, 320.1822038845727, 320.2935266832337, 318.75695199865294, 319.9924349789492, 318.4680965602126, 318.10958077036554, 316.859926370955, 315.5216044953502, 315.67839808527947, 316.5844856415096, 317.72657517474903, 318.8304839322443, 318.2432779925434, 317.8561626313113, 316.4151571133109, 315.61245827647184, 315.7263906953991, 315.62740481860067, 314.76006546120794, 314.9484545508863, 314.0620381736392, 313.7661119659698, 314.9398897369643, 316.12080621122993, 314.88124453492395, 313.31669629958736, 314.7543597764465, 315.9092036707328, 316.07911054188213, 
315.203569551961, 313.74829644437506, 313.14612821403216, 313.99260355975065, 312.9915287903878, 312.8898325592951, 314.22417668512355, 315.25009156194284, 314.67044916071785, 315.2843361716342, 314.6358626773988, 314.7141997881472, 315.68135039444013, 314.4617790791749, 313.51531753580366, 313.88312536046, 312.5077987107821, 313.5946418694696, 314.22744947812777, 315.0760685641837, 314.2044167879584, 314.93677803580874, 313.824426930715, 312.71778697133936, 313.3504458863514, 311.912899759309, 312.3367081569635, 310.828829884996, 310.50003259395316, 310.16423866641736, 310.19005526940015, 309.3628672158335, 309.59233937954906, 310.47556304161236, 310.11906966010963, 311.21000798279397, 312.76186814603994, 312.3845174522809, 313.25224628046243, 314.39531230989843, 315.1848398744544, 313.6423680733371, 313.2713185140979, 312.11852554783644, 313.2703990101888, 313.65924308841375, 314.6526064884288, 315.5069230796592, 314.0206967375966, 314.62294837815324, 314.283374937866, 312.7737083627264, 312.4812571014156, 311.49132236195516, 312.0679160942604, 311.29661924004364, 309.7665167411036, 310.4182462062505, 310.93189940498337, 312.04206730147956, 312.20611036512196, 312.04237006288145, 311.4425303908697, 312.4257487752489, 311.60735201286553, 310.54580700062104, 310.7355631943029, 309.30411250610456, 308.48332678465175, 308.54079833642885, 308.0632554820881, 306.52753888911127, 307.7699755792166, 308.3475664078009, 307.60745795140014, 307.2217699375177, 308.5112846415934, 308.77925561311974, 307.93583481633027, 306.5043240285352, 306.3437380385874, 307.5008778620342, 308.1501546803405, 308.8077634079772, 307.92565158288767, 307.1064564900044, 306.5234570753484, 307.6462898410873, 307.5445500315918, 306.3413052231496, 306.7709319603994, 307.11166237458184, 306.5411583577034, 307.9297137511138, 308.49906879399583, 309.9393303362207, 311.2668038425391, 311.5805422261368, 312.0363032422077, 310.8360904587586, 311.92575053490975, 311.60639837244844, 310.8908319631857, 
310.60818127920675, 310.8414009465543, 311.74546607788534, 310.20226912508633, 311.1077571586295, 309.9027665107331, 310.84391114314855, 310.6199339883159, 309.3454271230025, 310.56518762595624, 309.8115906529915, 310.5338810792072, 310.5816901165872, 309.20978384798394, 309.6789857747309, 308.498853509313, 308.305518425098, 308.1781046369848, 308.46962410718913, 308.64105549558974, 310.02976435078847, 310.4328540641079, 311.6520272562307, 312.5131571971675, 313.8907919053423, 312.86032792910214, 313.28345848745823, 313.85966588608926, 312.6508420597313, 312.47998534275774, 312.7890769793035, 312.0512479864471, 312.38225402945574, 311.0512056549133, 311.5139551137573, 310.16414434503827, 309.69081629571616, 309.266756843508, 310.62412649581375, 310.79347034920056, 312.1843043560122, 311.75116418562806, 312.8648084181653, 312.04212540380684, 312.4029473863219, 313.08067945129034, 311.7050554076411, 310.82594229990667, 310.18467713789596, 309.92576101738956, 309.2247972322877, 308.27110413751575, 308.0301287445077, 307.1248552322472, 307.36772017309823, 307.7314521776293, 307.62028742164716, 307.8663862652313, 309.2808274362055, 308.4520247931755, 308.437222640991, 309.5367033796235, 309.1859758538423, 310.4787112108544, 311.23198901569776, 311.48339224231665, 310.79578535673807, 309.82787692427735, 309.6611404268288, 309.88744658639473, 308.9868502650174, 309.13643987249014, 310.0339125852739, 308.48413317373576, 309.9410879103959, 309.108345108775, 308.3852961978603, 307.51786733201885, 308.1282404455477, 307.3793152779195, 306.40717565143865, 306.27949923950274, 305.1418493681874, 305.8423611219929, 305.2158148639071, 306.3298851531185, 305.2889122049813, 306.39053732845775, 307.37857527054973, 306.12532198910554, 304.71898410385995, 303.99367009255633, 305.14014463364714, 304.5785775113015, 304.43520406948267, 303.32762350320576, 302.6651164251717, 302.5713669161944, 303.95866313220944, 304.42971579948926, 305.56326960900424, 305.491990525387, 305.7765628782429, 
304.29451713164065, 303.55658928378176, 304.7264188754237, 305.73108439315257, 306.00375238794135, 306.3479053566221, 305.67837328137233, 304.4535586978252, 304.28537447950976, 305.6923349063061, 305.1273311500866, 306.52022534895696, 305.0002098542281, 306.04756916915426, 306.9252565260532, 305.8702337498736, 305.14045655907483, 305.8079202489487, 306.741598107023, 305.61022667806674, 306.8705538257567, 305.5714882056176, 305.1415710436808, 306.13367912685004, 306.45303357052154, 307.14699785946397, 306.92694975123334, 308.1868118337726, 308.01329732880413, 306.7195073895775, 307.3640638900889, 307.5620271599715, 306.66070667553123, 308.19205505361447, 309.61627489520197, 310.0638832929697, 310.5927765192044, 309.7782455080652, 308.90319864542386, 309.8968102663816, 309.73285450428347, 310.382804592083, 309.5064147135115, 309.707109349923, 310.1781721948907, 311.562561001908, 312.6530726171729, 313.68718910659726, 312.7205772286117, 311.4107498921754, 309.91418582154006, 309.94259124490986, 310.3539344718752, 309.89008782743537, 308.4329646142077, 307.58857781950326, 306.9201141432449, 306.103711259756, 307.3666231561777, 307.44874122124907, 307.3309005410495, 307.8031032292758, 306.73919924419556, 307.7197876524564, 309.2435310446344, 307.8775795547585, 307.6561672476653, 308.1034013833765, 309.38545605749664, 308.3495615874285, 309.2206450304152, 309.10553486018256, 309.64960382313217, 308.7897227845907, 308.8073835831323, 307.4901267957228, 307.28731984675204, 306.6608917739643, 305.58310440542704, 306.2171609253547, 305.57368807486796, 304.6130989811795, 305.89914995909663, 304.76475440078906, 304.7860456654056, 303.6352309078799, 302.94539742318904, 303.7100395537649, 303.34376605417077, 303.19051858243375, 303.3451088960485, 302.1669707079772, 300.98501057894816, 299.5944876724542, 300.2351823339716, 300.52016709332867, 299.5181306439559, 299.92251206683477, 301.24264950700103, 300.63294861261807, 300.23020148117524, 300.4754067956448, 301.73770968624285, 
301.8181199500507, 302.5205038293387, 301.6313058987744, 301.67762432753767, 300.7621148587233, 301.13533211326904, 302.1283123127263, 300.6227195468803, 301.96238765995133, 302.08894914342443, 302.39867088511346, 302.86578809837437, 303.2105959437682, 303.2005493426544, 302.06991978175677, 302.08916221005643, 302.1173769829317, 302.87180151092207, 303.32143139275934, 302.4111879855384, 300.9040158372221, 300.66101056806417, 299.549052443439, 300.4740472590758, 299.8203401119083, 300.5606691607799, 299.28497089810577, 299.6048179849605, 298.4481926328477, 298.4020873923371, 299.41054743957454, 298.42356994655614, 298.7453393557142, 297.97266893612306, 297.9393083103829, 298.58152684422714, 298.57096623690137, 298.21994588082566, 298.833171452796, 299.4888483582923, 299.40914341505953, 298.15056842179115, 299.3614321843631, 299.10678012296665, 299.340855613111, 300.2163895598117, 301.21442807891714, 301.8537928058229, 303.2325630310659, 302.76413045848227, 303.0243877666743, 302.92845956009796, 304.4252876377452, 304.8432268735075, 305.1531130642497, 304.43800340165893, 304.43466557652, 305.2757017929958, 305.4795461605731, 305.0369175517899, 306.33998556644025, 306.3636788533146, 307.1562320522279, 306.2210314477534, 306.81270069490336, 306.41401332931844, 306.39065191234045, 306.22199229814066, 305.74834215696904, 307.0969348668627, 308.3608239731114, 307.2664434908493, 308.38566228597443, 308.86812379982877, 309.7651708811832, 309.16111981690744, 308.52228685445107, 309.3207402643872, 307.94483017114663, 308.7432848424724, 309.6649002971448, 310.23674551538255, 309.1343105435097, 309.26656618354804, 309.8991923010775, 308.5988276980678, 309.25740293540974, 308.4163783313256, 308.12044918109933, 307.9508022092599, 308.17339583436274, 309.49319880499667, 310.087238270384, 311.5160048149593, 310.3333668836378, 308.9455123481304, 307.95006647489095, 308.97199146165923, 309.7741786894599, 308.38575705274235, 309.42256031578717, 309.14679672671735, 308.88521741964195, 
309.41977797489375, 310.71347140763186, 312.1326358671901, 311.2856852162561, 312.3325795819748, 312.18975688646395, 312.9725414128097, 312.7062953454625, 311.5800988115329, 312.77111033839975, 312.62412959607116, 313.91926599901893, 313.95112332938106, 315.2361438489909, 314.45202022295035, 314.61331753691763, 314.62592586924694, 314.91183367766564, 316.3652966192998, 316.36002006721895, 314.9889958926674, 314.53960790732225, 313.50983752310765, 313.5269314134644, 314.1637440847639, 313.3433390047431, 313.7531924216297, 315.03541737103603, 315.9964564991442, 317.20347236896043, 317.4954689149492, 316.8995834803906, 316.65410657350446, 315.9303030667645, 317.05791288529093, 318.6305230251822, 317.76601929955405, 317.6595491070878, 316.16842053940803, 315.5464215004829, 316.01194842996597, 317.2225582159548, 316.7914758515893, 317.3589415413279, 316.55308525257266, 316.0511038679681, 314.4712023633035, 313.7107684451178, 315.14874310592586, 313.718955459219, 313.02136758569617, 312.3185907531664, 312.4579532885226, 311.5751595936932, 310.2057169234392, 311.0007849774692, 309.4849522891909, 308.27119196585215, 308.6652679340058, 309.48682480229763, 309.25017213859445, 310.24002004618046, 311.6114032039477, 313.01658451619784, 311.7303327388224, 311.67628046378036, 311.2555632058434, 309.8245207228915, 308.50462625590154, 309.6087046530598, 308.3727421883933, 308.3804659853527, 307.350286755174, 307.12871022624097, 306.29968232393503, 306.72340236308855, 307.93445215535377, 306.4679530638106, 306.3821683352997, 304.92862235174135, 305.91335921719553, 305.0109775963874, 306.15347778643303, 307.1457342082539, 307.3076416761921, 306.92059298508894, 307.2170781616098, 308.6118870163233, 307.9954110554143, 309.1610589831536, 308.3559321101926, 308.7274936408016, 309.2672228962775, 308.2679968647586, 307.0632130147569, 306.81280199982683, 307.29819485982887, 306.17080635062905, 307.2039313083841, 307.67236106329165, 307.22234824604124, 307.3722046579997, 307.9229306488168, 
308.68769584536585, 308.3312519462052, 309.7907041084433, 308.99047738018055, 307.79473108262863, 307.47185497502596, 308.40631705157944, 308.46876015988505, 308.2138808285088, 306.93871125500857, 307.62424918591995, 307.3062442049918, 307.2347419794125, 308.07131478418654, 308.9189487651589, 310.03088696806145, 309.73123547620025, 310.6633211582661, 311.35264528504496, 310.62167718045515, 310.4726517467767, 311.6834246696677, 313.22385963646025, 313.6878777519354, 312.34018101049173, 313.54772719201645, 313.8965571545403, 315.1010765836913, 315.44442934034373, 315.1981225181179, 313.77164337505735, 313.35239898824875, 314.8851150423711, 313.70050834293613, 313.2714933255863, 314.46344985798726, 313.94991215858755, 315.4446266170212, 314.5032645271799, 313.58420349434294, 312.5270271420453, 311.02001146394844, 309.51095155975344, 308.839658381188, 308.3968917981342, 308.7903824119937, 307.27628883320165, 306.16834604359286, 305.2097357469954, 306.01098603223556, 304.74441935055495, 304.1023178525957, 305.1313616010643, 304.31178509042167, 303.7571916956587, 304.00864575811374, 303.7451141212988, 304.65297475252225, 304.19022718465135, 305.02764368080005, 304.07689975711884, 304.11203615066773, 303.1886278512775, 304.58749716146326, 303.20750641198555, 302.7468382116433, 304.141275860566, 303.399165981979, 304.5458231634839, 304.29580809410646, 305.7559307266712, 304.31961708020134, 305.15147658659487, 306.48049426544486, 305.8210402187184, 305.72883799453405, 306.2013543932896, 306.78286418763736, 305.33286561843016, 305.8550970375039, 304.9781325669961, 303.72617133572845, 302.5079865117467, 301.60216766093566, 301.00016510098203, 300.43168489872465, 300.01736514651776, 298.7907961797001, 299.990153929557, 301.07015813592744, 301.6266443246764, 302.7425065953903, 302.09816006833836, 300.6561176103372, 300.6424355884003, 300.982996807598, 300.43027839986075, 300.35991960677313, 299.7551090138981, 300.97564819739966, 299.9869829321229, 301.07834171479146, 
299.71295860961624, 298.22347243463474, 298.874541304398, 297.6521278348245, 296.2391646085146, 297.04266439873976, 298.2339872403879, 296.99954229250494, 296.39464307961896, 295.3485024211251, 295.60379200349007, 296.93712637493, 297.37268612259544, 297.09717831937206, 298.133345149739, 297.0019532028443, 295.67669846793905, 296.09660111824905, 295.5756220359156, 296.51767004324813, 296.5084879369295, 296.2035521908697, 294.8515876378756, 294.5870171693656, 295.01092370513965, 294.36406624837946, 294.47430022683807, 295.12314738914193, 293.8131373418207, 292.7425763887762, 292.8745110759368, 292.26160905489496, 292.52861417366626, 291.81248903443253, 290.8029213418647, 292.2365089573913, 292.78203569669523, 294.0775535288279, 294.7040964211989, 295.0117161183898, 295.4005537144134, 295.929249262804, 294.82947777031916, 293.70855036679285, 293.70255099365795, 294.9840056448199, 295.98188959659956, 296.1266036435427, 297.560272596198, 297.4667987579969, 296.954520595072, 295.51669608416404, 295.61693926586037, 294.3003782245478, 295.5891843713718, 296.34566995793443, 296.8248922564808, 297.1437180714291, 295.74123392159845, 296.42740577131445, 295.24443598130256, 293.8950802205989, 292.7181226061495, 293.1246847037022, 291.85187870083786, 292.03104326989336, 291.28216913426786, 291.69848414215, 291.79800745519833, 292.5682924526783, 291.8092357250397, 291.4514501220173, 292.3568993989594, 291.65379128704063, 293.03638872497066, 293.2456114142574, 292.77202784368905, 293.21883952566145, 292.0179620250805, 291.49717919118694, 290.7840236918834, 291.6199655368514, 290.7975407942679, 289.8292369920089, 291.0510456009947, 291.3219893146109, 292.30312707721146, 292.25299708568684, 291.2181265499873, 290.70375486378776, 289.25211007781667, 289.3267574758633, 289.88437518148845, 289.92462045744855, 289.14525382115534, 289.2227534571877, 290.3247872784551, 290.574406188969, 290.22673978847644, 289.19682412384253, 288.3464144500002, 287.15489500456073, 287.35193632115676, 
287.83510149809695, 287.12002561167526, 287.1034461298275, 288.51350985063726, 288.0777136195045, 289.3962008527057, 290.3726945417151, 289.2660897156349, 289.03718709678554, 288.819839279562, 289.89585947326094, 291.22172888633867, 291.8211676318815, 290.37594549169205, 290.5661304795284, 289.25193707835695, 290.0802035447079, 288.6560952637558, 289.7070071174532, 289.30309755000667, 290.33325918614906, 291.2790452668108, 290.82195098736287, 289.93081088751904, 290.2471565515165, 289.87984502620844, 289.54504472786084, 290.82272079851566, 291.905678539303, 291.2937823653963, 291.45338006905, 292.48731004103456, 291.10787992804876, 291.8091725297447, 292.15039616677535, 291.28091520028346, 291.0786387637648, 289.99553495677685, 288.68104524944346, 287.79517331685673, 288.36857499275453, 287.8438262145039, 286.6447475077124, 287.4649922475683, 287.49775006247745, 286.4899878079787, 286.9969234699501, 286.9523499047385, 287.7643824007471, 287.86909031950375, 287.6083079211439, 286.1832248602477, 286.29090840124536, 284.95522379917213, 283.9326346664484, 283.28214783910494, 284.00203574283114, 285.2298612961916, 286.02975002925746, 285.64933764187504, 286.9712458582131, 285.9617802352355, 285.11223063705285, 284.49800531337513, 284.9268865968042, 286.182672990687, 286.50401014775474, 286.02200772452125, 286.14905569574836, 285.97187400802824, 286.6553671984741, 285.337481854089, 285.9973881048534, 287.0207841793135, 286.5797457846742, 286.9337548627421, 286.4354258683075, 287.8581344241841, 289.03659652069587, 287.66008497953027, 286.8069751406848, 286.7267570332773, 287.2681930719517, 287.21160692611267, 287.33242459838027, 288.68536381015923, 288.83734802172023, 289.51047215827856, 289.484072823619, 289.53777148682275, 288.7344594196995, 289.23016911695146, 290.3356150608695, 290.5778685148956, 290.1310598877347, 289.3868190881782, 289.21714330945434, 288.8611385115785, 287.5742293436932, 288.5922770461548, 288.2326479953649, 288.9432617663993, 288.47334378659184, 
287.37058173571626, 288.0967548559247, 288.8011314423691, 289.9103881141757, 291.3282495605937, 290.50355095299915, 291.72507710038667, 290.78627453210623, 290.3582231530136, 290.89548139346203, 290.1279587870545, 290.66371706801357, 289.9430239427091, 290.6292045077081, 290.50231170347166, 290.467736054502, 289.0915152067895, 287.96659601506843, 288.6380247659592, 287.8616483327951, 286.7279201991542, 287.75773673316866, 288.2968457503593, 287.31596277708405, 288.3735066895345, 287.34884947573664, 286.59280589438504, 287.7236151328483, 287.0269384646835, 286.47330060834287, 286.37439064058185, 286.2661494604549, 284.9632872103058, 285.438422783942, 286.164765945111, 286.6559042949925, 287.3096516873585, 287.20107241754516, 286.04766050239306, 285.348212520608, 286.5890474693844, 286.1240367294077, 285.65582782563587, 284.28082867311485, 284.5773345413695, 283.71380911512676, 282.97767745538823, 282.0630101860859, 280.8219984017455, 280.4664317175948, 280.0498136595365, 280.2065149546413, 279.5098545062283, 279.54722959392166, 279.73619652030624, 278.56201604983335, 277.6310942834204, 276.81198497492346, 276.2703959629074, 276.81198772004194, 275.60420320519404, 275.9405667508308, 275.6794720311146, 274.4029928171559, 275.25986392055637, 274.49041459887115, 274.11404146985274, 274.6842549234787, 273.57080955054346, 273.0483636509461, 272.3823545182062, 272.28283794110484, 272.9092328140844, 271.64285271793943, 271.55098015006547, 271.24910472253487, 272.1974118833913, 272.43995426725155, 272.9824612014656, 273.48426869473917, 273.03895497917534, 273.86778533469305, 273.2698783007458, 272.8314820322257, 271.49523506007614, 270.313210573122, 271.502905187665, 272.5499475493797, 273.29542285114223, 273.0700969249742, 273.2000632872914, 273.01160138058873, 272.9761850534432, 273.054411594043, 272.3097709232316, 271.5572243896799, 272.6998261375318, 273.5775036455842, 274.85988839070853, 275.5212822479348, 274.9211764210435, 275.9745171916539, 275.2122504227413, 
276.07470328912984, 275.47705577243704, 275.60275114878544, 276.10375791819865, 276.79486563826293, 276.1963835388789, 275.2942926025343, 275.189352769853, 275.23474582739385, 276.6046740354641, 275.4405747554122, 274.51781837818146, 273.53097868877455, 272.85067087766765, 272.8494117012876, 271.7068195317173, 271.2313910823517, 270.5152303562077, 269.40680982150167, 268.1086555608028, 267.2297377491454, 268.21521984869264, 267.98725771931623, 268.9615979138804, 269.53233057438547, 270.7851397871963, 270.6905231932241, 269.71189629133187, 270.5833878390025, 270.6966212009762, 271.60655405350195, 271.32488572006054, 270.7256806492504, 271.4074756764546, 272.69354209365287, 271.85949023323815, 272.5858188681068, 273.33802132247735, 274.11677328858144, 274.61390874671497, 273.5353839956894, 273.037763340568, 273.1263639975902, 273.60243927257693, 272.93934664770063, 274.27277276807854, 274.5813509544567, 275.649131675675, 274.69005760819954, 274.75784871068623, 274.244237616765, 273.31512443398435, 272.92114920180785, 271.89136747477903, 270.9554443898604, 269.9800805359042, 268.98221479775464, 268.3199470944881, 267.4815041314295, 267.6729250583263, 268.0475585412896, 269.30173789916824, 268.7056578170476, 269.0262806567279, 269.3181727736914, 268.7945816547926, 268.7172296283962, 267.67840704662433, 267.6339994491351, 267.50961143320234, 268.7264497496854, 268.4878211615074, 268.410658801993, 267.2718408796667, 265.96351394715634, 265.0204884144782, 264.57284166870613, 263.4669361284047, 263.6840785073542, 264.5055993143508, 265.47126055288595, 264.1979892943731, 264.76795514941057, 265.84334132476397, 267.14123618215166, 267.8505956694481, 268.41816371793107, 269.12190053488354, 268.6931370397, 268.3971341152838, 269.0898373658342, 267.8120595066762, 267.1769885988925, 267.7365644086889, 266.46278800365235, 266.87671343788054, 267.57023214846276, 267.7113563607866, 266.90276112641845, 266.34565829838874, 265.3944316339453, 264.4184689951625, 263.7650105651993, 
263.64863232659326, 264.1052130482465, 263.402542397151, 262.2168806711976, 261.8435030583743, 261.7148978506901, 260.4282083493166, 260.20541406458955, 259.90508206620115, 260.4426667176709, 261.21749501810535, 260.51978633078954, 261.19377034638813, 260.22061747102174, 261.2952421217803, 260.5337170805894, 259.9665026963093, 260.12081303476486, 260.22243559158727, 259.9019484539103, 260.847603354707, 261.9422328219579, 262.83453578370495, 262.95068199699244, 262.63787118392105, 263.0131518919635, 261.8597883497241, 262.37474869680165, 262.25158053960865, 262.2036020463757, 262.4006384487525, 261.9924733106548, 260.7957915260125, 260.2675975068339, 261.56245464231114, 261.53029130613845, 262.45849023260246, 262.18666780157736, 263.0386296549294, 262.01315234270044, 262.01536675572675, 262.58351297404175, 262.83911040102345, 263.58406393821986, 264.20363691518946, 264.61310905479337, 265.3694613368615, 265.13829347276754, 264.5353526359652, 264.00975580326394, 264.5108641551771, 265.40878686350794, 266.4799478036354, 266.5388085441954, 266.67934187968643, 266.91933964567454, 267.75704683142004, 266.75373832317865, 265.54513560300757, 264.66609836229475, 265.053104440235, 264.0670697089025, 264.9434001669132, 264.4107620308129, 264.2746546380932, 264.0766591481399, 263.98917334646876, 265.2168881514438, 264.81770697028094, 264.2172593973967, 264.89844854836434, 264.31325619382676, 263.16357541515754, 263.3583794259191, 264.481784740828, 265.71829630438805, 266.29237303707566, 265.25346415118463, 266.3472466986003, 265.87658605340863, 266.29357388473767, 267.1992391627994, 268.1350884433037, 267.7548203759052, 267.4642307744275, 268.00380550417844, 268.4901575030478, 268.0526847891728, 267.8733413109138, 268.6788713595206, 268.18695018212304, 267.87816603743505, 268.9754879509946, 269.574122837606, 268.5662593671468, 269.7133408368816, 270.48649143128233, 271.07291935281347, 271.472153036679, 272.37149376740075, 273.3495967106558, 273.9873643447051, 
274.17319271525986, 275.1114580082239, 275.8914809273685, 276.0309835477045, 276.50267846934435, 276.07156325855715, 275.1246619781416, 273.96741332425063, 273.87823515775096, 274.76233550473404, 275.8609367021463, 277.17586380204364, 277.50710881689355, 277.1242948519232, 275.8592736458924, 275.07063649634966, 274.56617330868227, 273.9162477551265, 275.0274934186705, 275.0050233902684, 274.5974800066024, 274.19562277648305, 274.1044056023429, 274.8999378058984, 273.8588403731148, 275.0218685791504, 274.034839958482, 274.990088908576, 274.0736817705973, 274.8679342341734, 274.1959600134928, 275.3706799993844, 274.90799378665804, 274.8943575063114, 274.8926184670205, 274.5400282479332, 275.7302081661074, 276.09007606918726, 275.1070548981681, 275.1543654340362, 274.08415253164685, 274.7058662427743, 275.94612926626513, 274.986142705093, 275.2180195345832, 275.3339754733772, 274.44910315234165, 274.83960930731865, 275.5817991408806, 275.83547590906284, 275.1020858865053, 273.7414215093323, 273.90988513584756, 273.4718908434825, 274.3697459817635, 273.5284421462453, 274.7051554231453, 274.63833872733545, 274.1046152223831, 275.0432017435269, 273.99783762240696, 274.497034765601, 273.8500238310162, 273.6072007791186, 274.9057882282224, 275.9245613031108, 274.653154244886, 274.11956334361895, 274.68402573484394, 273.31847690307933, 274.2202214348529, 274.8689452532622, 274.2547920607743, 274.546089347384, 275.33784602123393, 274.9567864607837, 274.0229714118766, 273.2828848075652, 274.1150176936432, 274.4389457022982, 274.95747127513005, 275.93499441782467, 277.1256608120673, 277.5850382629809, 278.5473580145084, 278.12307635844434, 276.8759840074851, 277.92306868037775, 277.7571870158226, 277.65934060089535, 276.71386955437066, 276.4434696226811, 277.18857110259495, 275.9084594934515, 276.6489947240845, 276.28621875664766, 276.04684597519514, 276.4194023607294, 277.0816011042098, 276.80394498018563, 277.34806915078985, 275.9671374229787, 276.52011749693287, 
276.38892485078503, 275.91050239892235, 276.38385415139237, 275.1572589423319, 276.44790404953665, 276.3609665879768, 277.64780777345425, 278.65818741421293, 279.55719929226217, 279.97610307046386, 279.8102264906344, 278.61526541145577, 277.457607872975, 278.4167968281188, 277.52059744738057, 276.82928414957183, 275.8600618602881, 274.6622174712759, 274.27111404215, 274.80992964678217, 274.5890440883925, 274.77344211714296, 273.62986625143645, 272.86891520393755, 273.46545234055225, 272.2049581992916, 273.4336743236687, 273.7359587560937, 274.16749185125377, 272.9025028970668, 272.80577486645967, 273.77830071290975, 273.51050522861584, 273.22848506465726, 273.6719323390642, 274.0250960008773, 274.76785408127455, 275.46703326021606, 275.71029359100123, 276.81309804444174, 277.5851495442074, 277.9770347437387, 277.6328206475806, 277.1793064460872, 278.4447909874248, 278.62305356432887, 278.393424935346, 279.06488249976275, 278.02490696955715, 278.4991802703462, 279.4967961719285, 280.25788711736885, 279.7065529047487, 278.6721455803275, 279.8236500434098, 278.5238872082595, 279.43259479294153, 278.411232589788, 277.2896300398483, 276.46998838573575, 276.9014190896576, 276.10247975289184, 275.45280328723555, 275.5773591071469, 275.56666275112065, 275.35758287558957, 275.5632372666242, 274.33071389056107, 273.3096562058196, 273.1693578229517, 273.18849729969344, 274.5081166933881, 273.63382988586125, 273.3772443656716, 274.6699932201921, 274.48620707093454, 275.0624510539857, 276.05249954807755, 276.12632338495587, 275.8427811491748, 274.97537374254387, 273.90973862409254, 273.91784683065913, 274.46975590068246, 273.8994213411337, 273.30579253478254, 273.77822973270435, 273.43193491265123, 274.6784150267004, 273.44481590315223, 273.8519817898882, 275.0709881689578, 274.60951620512657, 274.1057072058496, 272.98503932732893, 273.5889984518275, 274.01178104213426, 275.01199261849786, 276.29679939167295, 277.58036582669826, 278.1557708202741, 278.9306111861637, 
279.40547276016656, 278.39540393461726, 279.1066085642186, 279.1648876326094, 279.11143913149556, 277.9300962525948, 278.2998600576104, 277.2429816613514, 275.977950227683, 275.0863392088177, 273.7712963926537, 274.7801148644234, 275.740897561536, 275.67985559725577, 276.28413817821456, 276.2316754067284, 277.51988046156293, 277.63725318760277, 276.7676189189346, 275.69622729832616, 274.5951724737096, 273.44522821838683, 273.29568348967723, 273.49520152785766, 274.32794359977106, 274.73840091135224, 274.51782432023504, 273.4304238512123, 272.3767932851553, 273.34548788636505, 272.67926040627435, 273.72897528167255, 273.9950220940279, 273.0108240963841, 271.9324708748731, 271.56530081679807, 271.5337503762048, 270.6182387163316, 271.0071273505988, 271.0991154838608, 270.14764471176915, 270.3823024693236, 269.0440585573693, 269.11518807857493, 270.1070591423294, 269.09167122454903, 269.98314355777933, 268.843575883707, 269.8336121029595, 269.9168929769721, 269.38525462568947, 269.0517435171813, 269.2338214877536, 269.4454830163482, 269.2112658556339, 269.90272592384883, 269.65837683012023, 270.7175886431698, 269.409403940924, 268.46530008906313, 267.38666339743946, 266.97814708839667, 266.4767644374903, 266.13416448113276, 266.25329627737017, 265.9579207901359, 264.8777084143996, 265.1346973350471, 266.3511140750997, 266.5591263266258, 266.3014799728384, 264.97138064189045, 265.33780579999683, 264.86402470032766, 265.23341734376993, 265.03277747792225, 264.6201980782237, 265.89207035796227, 266.1959807679754, 265.09166475940447, 264.18406040881547, 262.94350204624476, 262.4078316638022, 263.7089030947075, 264.6441463243144, 264.18048642700734, 263.6134918229246, 264.61204581336386, 263.3549466962423, 263.8994100968531, 262.99146571086055, 261.6916231281521, 262.09975325635764, 261.3872460162937, 260.2791158707222, 261.09357375959377, 262.0046364538377, 262.52608506059244, 262.9327459734756, 262.62148872421164, 262.8464669267115, 262.6120813031823, 263.5282869149602, 
263.8109771529282, 264.276575136816, 264.4368646065784, 265.49491952952013, 264.2428516475665, 265.29135116668186, 266.0660404654384, 266.24638160474404, 266.2953188085974, 265.67996761286264, 266.6449153033989, 267.29955041574186, 267.28086119667927, 266.2693939156329, 265.63248304019345, 266.89552737251887, 266.1396265282872, 267.21656974669696, 266.3237212024485, 265.8409740527773, 264.543650756287, 265.52561749150357, 266.2296532159389, 266.3694809574784, 266.977428553543, 267.87868858465947, 268.7521977938798, 268.8044635837225, 267.83789158170606, 268.7535210850905, 267.7975284228165, 266.86988000604714, 267.12352888896345, 266.14255846178105, 266.95280274424704, 267.14951053139424, 266.40593662886084, 266.32200593014727, 265.6866040977281, 265.8539421716282, 266.6004189916539, 265.4783343612777, 265.59856277340845, 265.85632847776225, 265.52956260190075, 266.2529715794922, 266.7729687928889, 267.8077031177744, 268.7182913676336, 268.8668866677459, 268.63670366780235, 268.31100053236463, 267.43758655658934, 268.6875207526889, 268.8611307497339, 267.5231347525777, 267.1972224264244, 266.9974195705007, 267.1328756827127, 265.9974030202272, 265.6261339853191, 266.76479207053956, 266.4343347005058, 266.1648330729593, 267.21864634332013, 267.786518008637, 268.01083123134975, 266.89462831477243, 266.7716010509194, 265.78682442941454, 266.1540400973472, 265.3931679455121, 264.4981812945012, 265.6857115292071, 266.9375203232988, 267.7393434307175, 267.3751471020721, 266.3159659542844, 265.7855013455359, 266.03884909871607, 265.64227659996806, 265.7533588430403, 265.7939624041148, 265.1308623964879, 264.43528908406097, 265.319276553716, 265.78937682269117, 265.11568218074194, 265.7358684260334, 265.2703151690948, 266.1156253461338, 265.2415437095478, 264.65023301997525, 264.88988531483045, 263.62862921262916, 263.0612002669502, 264.30283393261385, 263.0688741285198, 263.1699733193156, 262.9341855276113, 262.3289517616464, 262.89916262378046, 263.251934829506, 
263.6640400390508, 264.11798193378047, 264.17110925567977, 263.64040903335484, 262.8355934529545, 264.05765415543203, 262.76536034221897, 263.4773588893754, 262.75185347434564, 262.6246014143931, 262.0323688194541, 262.15100539916, 262.56645801174545, 261.39821538004327, 262.0786123024913, 261.6260483361765, 260.8159990383538, 261.6398188104435, 261.45210880421416, 262.3359964035108, 261.6376872267448, 260.5428083929022, 260.2713037709614, 261.12562110959243, 261.97361591799665, 262.76020141108484, 263.152811423786, 262.68587489695375, 261.5721442405139, 261.32557183372364, 262.2584686788861, 261.5436574816089, 261.8387853693628, 261.8385898297908, 261.8955432002437, 261.72504647995953, 261.3544865961518, 261.965664030125, 262.3311490369827, 262.0788663933772, 261.76235636939015, 262.347503360329, 263.1431609857177, 263.6414847773512, 264.906139588943, 265.54473402913084, 265.1745386159286, 265.44887110349276, 266.65748192418664, 266.62719620675165, 265.59035868555316, 266.3373539577226, 265.54623843501406, 264.62033984161627, 264.4573562065204, 265.7536447988337, 266.141548060761, 265.4310394765959, 265.8857324401798, 266.723230948392, 266.0233296605276, 266.99804348461714, 268.2340548783706, 269.47410620953247, 268.80208037461466, 269.97059983017107, 269.9115858895365, 270.1923964737594, 270.91335575393646, 271.3044801654067, 271.2951548847038, 271.58107052264126, 271.1387706624061, 271.1026314449409, 270.74877511779334, 271.2131009112236, 271.58388604955826, 272.13924925041687, 272.28715840062785, 272.0691412763587, 271.46505295368513, 271.44570351237735, 270.4211031648551, 270.5615026042562, 270.0391658223462, 270.1349662101679, 270.1023519140246, 271.35864410805874, 271.1567602142745, 271.42399987128806, 270.57013773345375, 271.9115316770964, 271.51585638927287, 270.4689153134611, 269.94026642740766, 270.04204657044886, 270.10572111373824, 270.5890299110736, 270.2235405772823, 270.6791795273655, 269.5041748988276, 270.2440768596363, 270.4081949364278, 
269.6442174979819, 268.3993349172753, 269.49566342975834, 268.707765580094, 268.40626897696234, 268.49898406781523, 269.44230464933486, 269.5087733560575, 270.3262469588718, 269.9641800740133, 268.7380942622458, 268.00337872007776, 268.16930147334637, 268.21362751729583, 267.4563821716507, 267.2879033411314, 266.989920609567, 265.8117330397419, 265.75497341925916, 265.0542820034564, 265.4551228410738, 266.403266037132, 266.36543418701444, 265.50114534247206, 266.5473349491358, 266.56036573254755, 265.5910585878441, 265.26017513685747, 264.9246780503585, 265.51110088280234, 266.03970227149784, 266.1813208428058, 266.7309189435197, 267.8335802598074, 268.99683246195843, 268.56155388942346, 268.9766505710834, 269.517413328825, 269.7990992468199, 270.201055615884, 270.04630109489983, 270.9757356655083, 269.8862912712828, 270.6965037419712, 270.4929943308299, 270.84821578587326, 272.06379290884183, 271.42869851701823, 272.38286512990675, 272.47891038015075, 273.81307391217314, 273.06731431386777, 271.7742581220029, 270.9047619676124, 271.09561402428744, 270.79609575411865, 270.938151866448, 270.09646793601115, 271.01630698440084, 270.6618891336054, 269.5236228859988, 269.69070739638533, 270.30431922898083, 269.36742607208265, 269.24493016415937, 269.95669186889666, 270.3470616444833, 270.91002273836415, 269.8178721409224, 270.31549331715524, 269.0485824933894, 268.71953987201664, 270.0442899264993, 269.05176118289614, 268.3495695405819, 269.00968612431177, 269.25237951515703, 270.31401449350346, 270.47717812352033, 269.45962249667076, 268.93832165977403, 269.7098696454393, 268.4551952538943, 268.8309467465741, 268.2901471363966, 268.11065073551345, 267.5276595051647, 268.86358169773655, 268.3486088250472, 267.58576884895217, 268.8440062662532, 269.752846087199, 270.0859498254079, 271.05909672524916, 270.8463103568751, 270.3072902874916, 270.2652210430756, 271.043836852352, 270.57228481946447, 271.1905104354507, 271.46438824605684, 271.8523914486794, 272.13440863657155, 
270.9574785121681, 270.83707618090153, 270.49484660427487, 271.54199866015284, 271.7048807307961, 271.2611189121996, 271.8357202904126, 271.59929020737013, 272.402085727607, 273.55169910281813, 272.50646109089735, 273.6506182060238, 273.7178332604246, 274.0633518023709, 273.50585735908425, 274.7549430181014, 274.17567826769033, 275.1811730672417, 273.9710379970108, 272.9844077353424, 271.68188649033544, 272.53822023441205, 272.6035104795643, 273.7395984224041, 273.76600190899484, 274.3544036455576, 274.0425309359155, 274.7691138641167, 273.9256693899809, 272.9169081078016, 271.9023531105101, 272.3857566284673, 271.55937367290943, 272.77723765505306, 274.1292754859255, 274.6997450039432, 275.50668718413925, 276.4473752319209, 276.1070779633929, 277.2996997710951, 276.5997090128258, 275.50663605781955, 276.0381007566994, 276.3151927591557, 277.1374176144615, 277.6696640948855, 277.38360495778966, 276.0529427406333, 275.32059041599865, 276.38316131166584, 276.81816268205574, 277.40700710154016, 276.0885857127859, 275.8151509892542, 274.9534235770195, 274.82801833980454, 276.1041015992688, 276.35595370933424, 275.76603316237674, 275.34611671680636, 276.53271041608065, 277.856818013721, 278.7052081923936, 277.41449603207303, 276.70242438544557, 275.60424823882374, 275.94717003765896, 276.0259413976756, 276.92801143343286, 276.17682264360144, 276.0833745747769, 275.26421729912545, 276.5014921595559, 276.01675713889205, 276.50262584610033, 275.22734893372586, 274.50112323033596, 274.111002932979, 274.45750139484466, 274.7679458130681, 273.860634793737, 274.97400534771134, 274.0212144694309, 272.8556021982112, 273.53124421975986, 273.5095907148633, 274.1199114316149, 272.92726330484095, 272.13606180845346, 273.12477067877984, 273.9600493328417, 273.1602676906157, 274.26874995277933, 275.4381287560133, 274.94814619542495, 275.7093368079934, 275.3115564628119, 275.0891352662906, 274.66349455333125, 274.8007809316387, 274.70091931190996, 275.0471882838131, 275.29320000555, 
276.4170689444257, 275.8473353739511, 276.43848271372093, 277.5944097065322, 277.3268481765576, 277.83145076296086, 277.51102356393926, 276.97377022735856, 276.67295877753855, 276.70480408769237, 275.52349070702934, 276.8007344733234, 275.9334525809579, 276.614048529118, 276.9151613238372, 278.073777870971, 277.8520547831723, 278.5584570313368, 277.9025091605352, 277.22870081134937, 275.894226147696, 275.7343959888323, 276.7345866611726, 278.06398933616134, 279.1300684469463, 278.90388707520776, 277.72135380841445, 277.73651481602235, 278.6980921449847, 278.9037038086704, 279.4518601943207, 280.46230777878566, 280.9851371458384, 279.6669261676471, 280.2639571409381, 281.16844036960435, 280.15086648798007, 280.6228970791225, 280.50554311279853, 281.2307636040205, 281.66334472533185, 281.0320935122222, 280.9885448227499, 280.89042729957123, 281.53808491911735, 281.6480818051773, 280.7807499609167, 280.1539699641343, 279.3156117705607, 279.6520881204959, 280.99799549606394, 279.66277657913344, 279.96943251517854, 278.7556146124845, 279.1526606732416, 280.25043354813874, 281.60969385920583, 281.815708236861, 282.8465661879553, 283.95205527467704, 284.14751122114853, 283.21100068268845, 283.60707802318495, 282.4515674770725, 282.3740761515121, 281.85571169439163, 282.2591383038998, 280.8859996154238, 280.09027762005024, 280.8194768558593, 280.812398313754, 279.8209324764798, 279.81320047471, 278.5409832832132, 279.2722016198532, 280.6444731285968, 281.55396408138364, 280.8872190323734, 282.1442082993632, 282.9233105785207, 282.6156522249092, 281.75772147118425, 282.65717245195094, 283.2167294161862, 282.055310803735, 281.5652330103203, 282.50411983445167, 282.943382244971, 283.76078283481496, 282.64620932307076, 282.34317806418164, 282.59372695808946, 281.84710363798706, 281.9433187228357, 282.941415615791, 283.0700865110158, 283.2659489140308, 282.29476435725564, 283.3931237201954, 283.112602574628, 283.2712340237975, 282.02699735496395, 283.0669223060908, 
282.99249384008994, 284.33836219952286, 284.58869141601525, 284.59402628152355, 283.9723779011265, 284.1029075088462, 284.1694330649875, 284.710045731013, 285.7644465621319, 285.82985638421, 286.25449844692605, 285.02305485152334, 285.24274417534997, 285.3520443084054, 284.0361635060021, 283.16194254400335, 284.38598396425596, 284.46546395528736, 285.3681474067757, 284.98181373322086, 284.1996712472647, 283.6377896791956, 282.69803181907514, 282.080600944229, 282.8146408974035, 283.20598227419805, 284.55291563851017, 284.35529197140374, 285.23489933681606, 283.81148229447524, 282.5225066721448, 283.2496855725208, 282.9498981504884, 283.504339818969, 284.65940142517354, 285.73510795659655, 285.84990775433187, 284.93340940084073, 284.89237649305375, 284.41055061918433, 283.85077023397224, 285.25552763192724, 286.6064579353937, 287.4096705987443, 288.0759494595667, 288.71826005008955, 287.9296342909564, 288.64072817274797, 289.485552852907, 290.08253481942467, 289.48812816928205, 290.3337065331776, 290.43455912316495, 291.2116249249331, 290.08649703881315, 289.1239080311155, 290.1033983747716, 289.27361037761403, 287.8825693343573, 286.89969353426005, 286.2180102378274, 286.40582468825284, 286.3106570974321, 287.28560857883826, 287.11993470868447, 288.0699077543082, 287.28761083760025, 288.0329781477136, 287.95020504073693, 288.4507287302789, 289.5084795089238, 290.44833376474463, 289.8844566640505, 289.21083232747253, 288.30916304125793, 287.97955015971155, 288.96071952569275, 289.3290642610049, 289.88074676298515, 290.6472212044126, 289.25389585163674, 290.2789410978004, 291.5772502424745, 290.96540131378225, 291.1428702140302, 291.63596119189384, 290.8769759435732, 290.9799892255759, 290.14425676058636, 290.15472101354476, 289.6143589436245, 290.9734281235787, 290.1237639141511, 289.570432203761, 289.8292849198912, 289.9836610628544, 290.0302300515422, 290.0524333194044, 290.5539145087836, 290.5574712617881, 291.9140701349261, 290.62220493123635, 
291.25638816305025, 292.17448449012176, 291.0497066059758, 292.1934778621529, 292.3692564854695, 292.75653285807715, 292.6450059969462, 292.8934547357857, 292.3314592297331, 293.1650016581272, 292.0321085067428, 291.7873721179614, 291.1387228149814, 290.1106114976324, 288.8331116071404, 289.93336575385297, 290.43098710250405, 291.4945423927745, 290.72009201941205, 291.14412234244315, 291.5912124579031, 292.52316182193454, 292.67154642948225, 293.2046326769039, 293.49749631773693, 292.23347617986985, 291.61886955592433, 292.650996989169, 292.74600863685043, 292.53432650807054, 293.1678004538063, 293.33393000569384, 292.3134979994677, 292.16706999667076, 291.3146571980254, 292.48911161023216, 292.4837008080836, 293.340944422515, 292.3697843978366, 293.7378080481057, 295.1440437696596, 296.38189349046644, 296.39585431515115, 297.55741918391635, 296.49871934109956, 295.14346022451684, 294.449350673904, 294.8131560223535, 295.5129596044575, 295.4120178510717, 295.13903638471965, 294.5889140689176, 293.961742555652, 294.1229938447017, 293.991065324728, 294.63490399168063, 293.7238671344607, 292.4371703780324, 293.40158984218095, 293.41021885552925, 294.06142558717715, 293.8000534178122, 295.0967536968284, 295.1745258849553, 294.08439968625345, 294.3357123925208, 295.05896140698167, 294.5011256258419, 294.9092464298692, 294.79593575673107, 296.0985375277662, 295.37658949029026, 294.62878458360797, 293.5650180805574, 294.07430439791835, 294.36775068509723, 295.2280037346335, 296.0162603069182, 294.6326582567918, 293.52716239854055, 294.0646163315882, 295.03393178447465, 293.57683658156554, 293.9548795924975, 293.5696843170711, 293.9301584914845, 293.115880114681, 294.10975345691406, 295.2801721523215, 295.1558137122974, 294.352999355304, 293.04934182275974, 292.45023910973333, 293.8305806562832, 293.1064174053329, 294.5718502395364, 295.07697612226343, 295.1935779475296, 296.20496371339306, 295.3678672718054, 295.67497643004407, 294.32251954823346, 293.57962179139315, 
293.2560645002425, 294.4297575003928, 295.4712067198995, 295.3586771430411, 294.6094280651074, 293.4224641943943, 293.76661117412266, 295.2344744230966, 295.3739318856077, 295.35908028006327, 294.3897568369595, 295.2610780287098, 296.293127276903, 297.57130826220276, 296.78229057584065, 297.89841020147304, 298.7824183108401, 298.00988893480957, 296.98196900045974, 296.0503480844669, 297.4892413730064, 298.18797597462105, 298.62824431171475, 297.6245624755487, 297.57418737045947, 298.01003813308523, 297.06784080528183, 296.8863939031778, 298.2322569354016, 299.2794180337693, 299.1810061979525, 298.32590907985406, 299.74237572342366, 300.5469753702612, 301.7443789696711, 300.9737242119163, 300.4952922655042, 301.42223304765287, 300.29723735165817, 300.4632197429267, 301.5953603425313, 300.79724097274976, 301.1082600333722, 302.27574777782917, 302.31293593728634, 300.9122588022265, 301.2568136369356, 301.71396235285926, 301.6670018347122, 300.7218578288499, 299.72483732513484, 300.2293545816343, 299.40929409787816, 300.5691466217698, 300.6601386678636, 300.11638291157436, 299.52628224448017, 299.75548271676297, 300.25831307582376, 301.73835826662463, 301.1261020937078, 301.7407607641605, 302.0254433845249, 300.83415323481415, 299.865146551503, 299.8607721751943, 299.7180939422886, 300.6830818112904, 300.75923308975257, 301.3257317902076, 299.9501997703142, 300.29750341832874, 300.90785889712606, 301.93577843471576, 302.0670151063878, 303.2164400304859, 304.02134454744095, 304.6488208892217, 304.5204729114734, 303.9157883236438, 302.51040658378, 303.2681408114319, 303.3595777627942, 303.2599917644948, 303.57404169477053, 304.38621363011316, 305.8269731229365, 305.04529053313024, 305.3064573015811, 304.13167643961407, 303.24343026449253, 303.1239652135238, 302.23936452012566, 301.83196575289026, 301.44683533226146, 299.9476536135093, 300.6628918802418, 301.51692525365644, 301.5533948890362, 302.47777627750537, 303.83001139161365, 303.22228196490715, 302.9309059080474, 
302.123693118544, 302.27775831122045, 301.3109600371632, 301.74112229347605, 301.67153234402576, 301.4511392862305, 300.83188339748614, 300.87142624486563, 300.04061146381395, 300.531163044725, 301.8162041094512, 302.5340582913713, 302.8523802758188, 303.2247807418154, 304.46855475058373, 303.9344759578766, 304.60135320377066, 306.0026948816567, 306.2082091194272, 307.6366054726608, 307.1557131640199, 307.4407196102706, 306.7266503649597, 307.7869191008425, 306.2706394335121, 307.65559757690215, 308.6102510841623, 308.51842760379026, 306.9778576371842, 306.9306764382863, 306.84862634108663, 307.2402957148534, 307.8259724636147, 306.91665850478586, 307.11423438729827, 308.4599574204847, 308.08472790607163, 306.68858771524407, 306.6275627641577, 306.25585540871407, 307.5609763980488, 306.86914827607006, 307.0260150961066, 306.20622731582097, 307.198691630391, 305.66818911612495, 305.0498277254329, 306.5637954239561, 306.352651027852, 307.6740812391797, 308.88132260891166, 310.3285352266319, 310.2565898069455, 309.0542868205618, 308.11130967655106, 307.4474081214765, 306.45613202057234, 305.44874812493777, 304.5917016240685, 305.7844307571647, 306.8270155061543, 306.0827426727008, 306.6854202786975, 306.1767498408882, 306.39602150650234, 305.98146127006004, 306.7297495686794, 307.9782584536296, 308.05863156509264, 309.13243944379184, 307.84747371647103, 308.8046868821519, 310.3132522566595, 311.6467242342364, 312.2673389009209, 313.1579159038033, 314.5927034986946, 315.37396924535045, 315.0668305266113, 314.23070288117196, 315.4332118240781, 316.243203359768, 316.0996165216486, 315.6016795521654, 315.2665858849959, 315.63062829013984, 317.1591247521335, 315.8807205111721, 317.199011926556, 315.7673075378215, 316.4550427213863, 317.83426225659935, 318.3012945344805, 316.890508363073, 316.1120044367191, 317.3037247247666, 315.72064125624445, 315.5914031283703, 316.99776846887033, 316.08221966089405, 316.2041840803936, 315.2872679963111, 314.14899830929704, 
314.72793835806493]
for i in range(len(data)):
yield data[i]
yield None
| 1,310.454545 | 69,772 | 0.839209 | 7,535 | 72,075 | 8.017916 | 0.504579 | 0.001639 | 0.003095 | 0.001192 | 0.002384 | 0.001092 | 0.001092 | 0.001092 | 0 | 0 | 0 | 0.867284 | 0.056191 | 72,075 | 54 | 69,773 | 1,334.722222 | 0.020845 | 0.000888 | 0 | 0 | 0 | 0 | 0.00911 | 0.001916 | 0 | 0 | 0 | 0.018519 | 0 | 1 | 0.210526 | false | 0 | 0.026316 | 0.105263 | 0.394737 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 7 |
fcc683ebd5ca35466877fa1fedaeaaeab84cecc4 | 4,376 | py | Python | in-class/Week 12 HMEQ SVM.py | dvtate/cs484 | 725ef6d90a9d22694147245b9a0928477e053585 | [
"MIT"
] | null | null | null | in-class/Week 12 HMEQ SVM.py | dvtate/cs484 | 725ef6d90a9d22694147245b9a0928477e053585 | [
"MIT"
] | null | null | null | in-class/Week 12 HMEQ SVM.py | dvtate/cs484 | 725ef6d90a9d22694147245b9a0928477e053585 | [
"MIT"
] | null | null | null | import matplotlib.pyplot as plt
import numpy
import pandas
import scipy.stats as stats
import sklearn.decomposition as decomposition
import sklearn.metrics as metrics
import sklearn.svm as svm
hmeq = pandas.read_csv('C:\\Users\\minlam\\Documents\\IIT\\Machine Learning\\Data\\HMEQ.csv',
delimiter=',')
y_name = 'BAD'
# Set dual = False because n_samples > n_features
# Step 1
accuracyResult = pandas.DataFrame()
includeVar = []
X_name = ['CLAGE','CLNO','DELINQ','DEROG','NINQ','YOJ']
for ivar in X_name:
inputData = hmeq[includeVar + [y_name, ivar]].dropna()
X = inputData[includeVar + [ivar]]
y = inputData[y_name].astype('category')
svm_Model = svm.LinearSVC(verbose = 1, dual = False, random_state = None, max_iter = 10000)
thisFit = svm_Model.fit(X, y)
y_predictClass = thisFit.predict(X)
y_predictAccuracy = metrics.accuracy_score(y, y_predictClass)
accuracyResult = accuracyResult.append([[includeVar + [ivar], inputData.shape[0], y_predictAccuracy]], ignore_index = True)
# Step 2
accuracyResult = pandas.DataFrame()
includeVar = ['YOJ']
X_name = ['CLAGE','CLNO','DELINQ','DEROG','NINQ']
for ivar in X_name:
inputData = hmeq[includeVar + [y_name, ivar]].dropna()
X = inputData[includeVar + [ivar]]
y = inputData[y_name].astype('category')
svm_Model = svm.LinearSVC(verbose = 1, dual = False, random_state = None, max_iter = 10000)
thisFit = svm_Model.fit(X, y)
y_predictClass = thisFit.predict(X)
y_predictAccuracy = metrics.accuracy_score(y, y_predictClass)
accuracyResult = accuracyResult.append([[includeVar + [ivar], inputData.shape[0], y_predictAccuracy]], ignore_index = True)
# Step 3
accuracyResult = pandas.DataFrame()
includeVar = ['YOJ', 'NINQ']
X_name = ['CLAGE','CLNO','DELINQ','DEROG']
for ivar in X_name:
inputData = hmeq[includeVar + [y_name, ivar]].dropna()
X = inputData[includeVar + [ivar]]
y = inputData[y_name].astype('category')
svm_Model = svm.LinearSVC(verbose = 1, dual = False, random_state = None, max_iter = 10000)
thisFit = svm_Model.fit(X, y)
y_predictClass = thisFit.predict(X)
y_predictAccuracy = metrics.accuracy_score(y, y_predictClass)
accuracyResult = accuracyResult.append([[includeVar + [ivar], inputData.shape[0], y_predictAccuracy]], ignore_index = True)
# Step 4
accuracyResult = pandas.DataFrame()
includeVar = ['YOJ', 'NINQ', 'CLNO']
X_name = ['CLAGE','DELINQ','DEROG']
for ivar in X_name:
inputData = hmeq[includeVar + [y_name, ivar]].dropna()
X = inputData[includeVar + [ivar]]
y = inputData[y_name].astype('category')
svm_Model = svm.LinearSVC(verbose = 1, dual = False, random_state = None, max_iter = 10000)
thisFit = svm_Model.fit(X, y)
y_predictClass = thisFit.predict(X)
y_predictAccuracy = metrics.accuracy_score(y, y_predictClass)
accuracyResult = accuracyResult.append([[includeVar + [ivar], inputData.shape[0], y_predictAccuracy]], ignore_index = True)
# Step 5
accuracyResult = pandas.DataFrame()
includeVar = ['YOJ', 'NINQ', 'CLNO', 'CLAGE']
X_name = ['DELINQ','DEROG']
for ivar in X_name:
inputData = hmeq[includeVar + [y_name, ivar]].dropna()
X = inputData[includeVar + [ivar]]
y = inputData[y_name].astype('category')
svm_Model = svm.LinearSVC(verbose = 1, dual = False, random_state = None, max_iter = 10000)
thisFit = svm_Model.fit(X, y)
y_predictClass = thisFit.predict(X)
y_predictAccuracy = metrics.accuracy_score(y, y_predictClass)
accuracyResult = accuracyResult.append([[includeVar + [ivar], inputData.shape[0], y_predictAccuracy]], ignore_index = True)
# Step 6
accuracyResult = pandas.DataFrame()
includeVar = ['YOJ', 'NINQ', 'CLNO', 'CLAGE', 'DEROG']
X_name = ['DELINQ']
for ivar in X_name:
inputData = hmeq[includeVar + [y_name, ivar]].dropna()
X = inputData[includeVar + [ivar]]
y = inputData[y_name].astype('category')
svm_Model = svm.LinearSVC(verbose = 1, dual = False, random_state = None, max_iter = 10000)
thisFit = svm_Model.fit(X, y)
y_predictClass = thisFit.predict(X)
y_predictAccuracy = metrics.accuracy_score(y, y_predictClass)
accuracyResult = accuracyResult.append([[includeVar + [ivar], inputData.shape[0], y_predictAccuracy]], ignore_index = True) | 42.076923 | 128 | 0.684186 | 547 | 4,376 | 5.312614 | 0.14808 | 0.022368 | 0.057811 | 0.080523 | 0.87371 | 0.859257 | 0.83861 | 0.801445 | 0.763593 | 0.763593 | 0 | 0.013322 | 0.176645 | 4,376 | 104 | 129 | 42.076923 | 0.793228 | 0.020338 | 0 | 0.731707 | 0 | 0 | 0.067273 | 0.015801 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.085366 | 0 | 0.085366 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
fcdae3c790b8dbfe2b16aec6694cc334b980dbf0 | 838 | py | Python | django_password_validators/password_history/hashers.py | nelc/django-password-validators | 4d8dd88644bfe8bc2b3c42d1cd679c78ca04327b | [
"BSD-3-Clause"
] | 33 | 2016-03-01T12:51:13.000Z | 2022-03-17T12:28:47.000Z | django_password_validators/password_history/hashers.py | nelc/django-password-validators | 4d8dd88644bfe8bc2b3c42d1cd679c78ca04327b | [
"BSD-3-Clause"
] | 19 | 2017-06-13T19:06:07.000Z | 2022-03-16T09:20:01.000Z | django_password_validators/password_history/hashers.py | nelc/django-password-validators | 4d8dd88644bfe8bc2b3c42d1cd679c78ca04327b | [
"BSD-3-Clause"
] | 22 | 2016-06-11T18:54:37.000Z | 2022-01-13T20:56:24.000Z | from django.contrib.auth.hashers import PBKDF2PasswordHasher
class HistoryHasher(PBKDF2PasswordHasher):
"""
We need to keep the old password so that when you update django
(or configuration change) hashes have not changed.
Therefore, special hasher.
"""
# Experimental value of the of iterations so that the calculation on the
# average server configuration lasted around one second.
iterations = 20000 * 10
class HistoryVeryStrongHasher(PBKDF2PasswordHasher):
"""
We need to keep the old password so that when you update django
(or configuration change) hashes have not changed.
Therefore, special hasher.
"""
# Experimental value of the of iterations so that the calculation on the
# average server configuration lasted around 10 second.
iterations = 20000 * 101 | 33.52 | 76 | 0.732697 | 103 | 838 | 5.961165 | 0.456311 | 0.039088 | 0.084691 | 0.091205 | 0.752443 | 0.752443 | 0.752443 | 0.752443 | 0.752443 | 0.752443 | 0 | 0.030534 | 0.218377 | 838 | 25 | 77 | 33.52 | 0.90687 | 0.643198 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.6 | 0.2 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 7 |
1e0023b51e402eb5aeaa731e4c0ac9b38f3552d7 | 12,044 | py | Python | tests/loss/test_pairwise_lambda.py | rjagerman/pytorchltr | 625416e1e7d21fb2bbc485914704fc2e55274556 | [
"MIT"
] | 37 | 2020-05-24T13:40:52.000Z | 2022-03-17T09:00:52.000Z | tests/loss/test_pairwise_lambda.py | SuperXiang/pytorchltr | 625416e1e7d21fb2bbc485914704fc2e55274556 | [
"MIT"
] | 22 | 2020-05-25T11:35:38.000Z | 2021-03-20T04:08:07.000Z | tests/loss/test_pairwise_lambda.py | SuperXiang/pytorchltr | 625416e1e7d21fb2bbc485914704fc2e55274556 | [
"MIT"
] | 4 | 2020-10-16T13:14:16.000Z | 2022-01-31T17:18:20.000Z | import torch
from pytorchltr.loss import LambdaARPLoss1
from pytorchltr.loss import LambdaARPLoss2
from pytorchltr.loss import LambdaNDCGLoss1
from pytorchltr.loss import LambdaNDCGLoss2
from math import log2
from math import exp
from pytest import approx
def test_lambda_losses_batch():
scores = torch.tensor([
[0.5, 2.0, 1.0],
[0.9, -1.2, 0.0]
])
relevance = torch.tensor([
[2, 0, 1],
[0, 1, 0]
])
n = torch.tensor([3, 2])
loss_fn = LambdaARPLoss1()
loss = loss_fn(scores, relevance, n)
assert float(loss[0]) == approx(13.298417091369629)
assert float(loss[1]) == approx(4.196318626403809)
loss_fn = LambdaARPLoss2()
loss = loss_fn(scores, relevance, n)
assert float(loss[0]) == approx(8.209173202514648)
assert float(loss[1]) == approx(3.1963188648223877)
loss_fn = LambdaNDCGLoss1()
loss = loss_fn(scores, relevance, n)
assert float(loss[0]) == approx(2.629549503326416)
assert float(loss[1]) == approx(2.647582530975342)
loss_fn = LambdaNDCGLoss2()
loss = loss_fn(scores, relevance, n)
assert float(loss[0]) == approx(0.3102627396583557)
assert float(loss[1]) == approx(0.4184933304786682)
def test_lambda_arp1_reshape_scores():
loss_fn = LambdaARPLoss1()
scores = torch.FloatTensor([[0.0, 0.0, 1.0, 2.0, 1.0]])
ys = torch.LongTensor([[[0], [0], [1], [2], [1]]])
n = torch.LongTensor([5])
loss = loss_fn(scores, ys, n)
# Compute result iteratively
expected = 0.0
for i in range(n[0]):
for j in range(n[0]):
score_diff = float(scores[0, i] - scores[0, j])
inner = 1.0 / (1.0 + exp(-1.0 * score_diff))
inner = inner ** float(ys[0, i, 0])
expected -= log2(inner)
assert loss.item() == approx(expected)
def test_lambda_arp1_reshape_rel():
loss_fn = LambdaARPLoss1()
scores = torch.FloatTensor([[[0.0], [0.0], [1.0], [2.0], [1.0]]])
ys = torch.LongTensor([[0, 0, 1, 2, 1]])
n = torch.LongTensor([5])
loss = loss_fn(scores, ys, n)
# Compute result iteratively
expected = 0.0
for i in range(n[0]):
for j in range(n[0]):
inner = 1.0 / (1.0 + exp(
-1.0 * float(scores[0, i, 0] - scores[0, j, 0])))
inner = inner ** float(ys[0, i])
expected -= log2(inner)
assert loss.item() == approx(expected)
def test_lambda_arp1_perfect():
loss_fn = LambdaARPLoss1()
scores = torch.FloatTensor([[0.0, 0.0, 10.0, 20.0, 10.0]])
ys = torch.LongTensor([[0, 0, 1, 2, 1]])
n = torch.LongTensor([5])
loss = loss_fn(scores, ys, n)
# Compute result iteratively
expected = 0.0
for i in range(n[0]):
for j in range(n[0]):
inner = 1.0 / (1.0 + exp(
-1.0 * float(scores[0, i] - scores[0, j])))
inner = inner ** float(ys[0, i])
expected -= log2(inner)
assert loss.item() == approx(expected)
def test_lambda_arp1_worst():
loss_fn = LambdaARPLoss1()
scores = torch.FloatTensor([[4.0, 4.0, 2.0, 0.0, 2.0]])
ys = torch.LongTensor([[0, 0, 1, 2, 1]])
n = torch.LongTensor([5])
loss = loss_fn(scores, ys, n)
# Compute result iteratively
expected = 0.0
for i in range(n[0]):
for j in range(n[0]):
inner = 1.0 / (1.0 + exp(
-1.0 * float(scores[0, i] - scores[0, j])))
inner = inner ** float(ys[0, i])
expected -= log2(inner)
assert loss.item() == approx(expected)
def test_lambda_arp1_mid():
loss_fn = LambdaARPLoss1()
scores = torch.FloatTensor([[0.0, 1.0, 1.0, -2.0, 0.0]])
ys = torch.LongTensor([[0, 0, 1, 2, 1]])
n = torch.LongTensor([4])
loss = loss_fn(scores, ys, n)
# Compute result iteratively
expected = 0.0
for i in range(n[0]):
for j in range(n[0]):
inner = 1.0 / (1.0 + exp(
-1.0 * float(scores[0, i] - scores[0, j])))
inner = inner ** float(ys[0, i])
expected -= log2(inner)
assert loss.item() == approx(expected)
def test_lambda_arp2_perfect():
loss_fn = LambdaARPLoss2()
scores = torch.FloatTensor([[0.0, 0.0, 10.0, 20.0, 10.0]])
ys = torch.LongTensor([[0, 0, 1, 2, 1]])
n = torch.LongTensor([5])
loss = loss_fn(scores, ys, n)
# Compute result iteratively
expected = 0.0
for i in range(n[0]):
for j in range(n[0]):
if ys[0, i] > ys[0, j]:
inner = 1.0 + exp(-1.0 * float(scores[0, i] - scores[0, j]))
expected += abs(float(ys[0, i] - ys[0, j])) * log2(inner)
assert loss.item() == approx(expected, rel=1e-06, abs=1e-6)
def test_lambda_arp2_worst():
loss_fn = LambdaARPLoss2()
scores = torch.FloatTensor([[4.0, 4.0, 2.0, 0.0, 2.0]])
ys = torch.LongTensor([[0, 0, 1, 2, 1]])
n = torch.LongTensor([5])
loss = loss_fn(scores, ys, n)
# Compute result iteratively
expected = 0.0
for i in range(n[0]):
for j in range(n[0]):
if ys[0, i] > ys[0, j]:
inner = 1.0 + exp(-1.0 * float(scores[0, i] - scores[0, j]))
expected += abs(float(ys[0, i] - ys[0, j])) * log2(inner)
assert loss.item() == approx(expected)
def test_lambda_arp2_mid():
loss_fn = LambdaARPLoss2()
scores = torch.FloatTensor([[0.0, 1.0, 1.0, -2.0, 0.0]])
ys = torch.LongTensor([[0, 0, 1, 2, 1]])
n = torch.LongTensor([4])
loss = loss_fn(scores, ys, n)
# Compute result iteratively
expected = 0.0
for i in range(n[0]):
for j in range(n[0]):
if ys[0, i] > ys[0, j]:
inner = 1.0 + exp(-1.0 * float(scores[0, i] - scores[0, j]))
expected += abs(float(ys[0, i] - ys[0, j])) * log2(inner)
assert loss.item() == approx(expected)
def test_lambda_ndcg1_perfect():
loss_fn = LambdaNDCGLoss1()
scores = torch.FloatTensor([[0.0, 0.0, 10.0, 20.0, 10.0]])
ys = torch.LongTensor([[0, 0, 1, 2, 1]])
n = torch.LongTensor([5])
loss = loss_fn(scores, ys, n)
sorting = [3, 4, 2, 0, 1]
discounts = [log2(2.0 + i) for i in range(5)]
max_dcg = (
(2 ** 2.0 - 1.0) / log2(2.0) +
(2 ** 1.0 - 1.0) / log2(3.0) +
(2 ** 1.0 - 1.0) / log2(4.0))
gains = [((2 ** float(ys[0, i])) - 1.0) / max_dcg
for i in range(5)]
# Compute result iteratively
expected = 0.0
for i in range(n[0]):
for j in range(n[0]):
si = sorting[i]
sj = sorting[j]
inner = 1.0 + exp(-1.0 * float(scores[0, si] - scores[0, sj]))
inner = (1.0 / inner) ** (gains[si] / discounts[i])
expected -= log2(inner)
assert loss.item() == approx(expected)
def test_lambda_ndcg1_worst():
loss_fn = LambdaNDCGLoss1()
scores = torch.FloatTensor([[4.0, 4.0, 2.0, 0.0, 2.0]])
ys = torch.LongTensor([[0, 0, 1, 2, 1]])
n = torch.LongTensor([5])
loss = loss_fn(scores, ys, n)
sorting = [1, 0, 2, 4, 3]
discounts = [log2(2.0 + i) for i in range(5)]
max_dcg = (
(2 ** 2.0 - 1.0) / log2(2.0) +
(2 ** 1.0 - 1.0) / log2(3.0) +
(2 ** 1.0 - 1.0) / log2(4.0))
gains = [((2 ** float(ys[0, i])) - 1.0) / max_dcg
for i in range(5)]
# Compute result iteratively
expected = 0.0
for i in range(n[0]):
for j in range(n[0]):
si = sorting[i]
sj = sorting[j]
inner = 1.0 + exp(-1.0 * float(scores[0, si] - scores[0, sj]))
inner = (1.0 / inner) ** (gains[si] / discounts[i])
expected -= log2(inner)
assert loss.item() == approx(expected)
def test_lambda_ndcg1_mid():
loss_fn = LambdaNDCGLoss1()
scores = torch.FloatTensor([[0.0, 1.0, 1.5, -2.0, 0.0]])
ys = torch.LongTensor([[0, 0, 1, 2, 1]])
n = torch.LongTensor([4])
loss = loss_fn(scores, ys, n)
sorting = [2, 1, 0, 3, 4]
discounts = [log2(2.0 + i) for i in range(5)]
max_dcg = (2 ** 2.0 - 1.0) / log2(2.0) + (2 ** 1.0 - 1.0) / log2(3.0)
gains = [((2 ** float(ys[0, i])) - 1.0) / max_dcg
for i in range(5)]
# Compute result iteratively
expected = 0.0
for i in range(n[0]):
for j in range(n[0]):
si = sorting[i]
sj = sorting[j]
inner = 1.0 + exp(-1.0 * float(scores[0, si] - scores[0, sj]))
inner = (1.0 / inner) ** (gains[si] / discounts[i])
expected -= log2(inner)
assert loss.item() == approx(expected)
def test_lambda_ndcg2_perfect():
loss_fn = LambdaNDCGLoss2()
scores = torch.FloatTensor([[0.0, 0.0, 10.0, 20.0, 10.0]])
ys = torch.LongTensor([[0, 0, 1, 2, 1]])
n = torch.LongTensor([5])
loss = loss_fn(scores, ys, n)
sorting = [3, 4, 2, 0, 1]
discounts = [log2(2.0 + i) for i in range(5 + 1)]
max_dcg = (
(2 ** 2.0 - 1.0) / log2(2.0) +
(2 ** 1.0 - 1.0) / log2(3.0) +
(2 ** 1.0 - 1.0) / log2(4.0))
gains = [((2 ** float(ys[0, i])) - 1.0) / max_dcg
for i in range(5)]
# Compute result iteratively
expected = 0.0
for i in range(n[0]):
for j in range(n[0]):
si = sorting[i]
sj = sorting[j]
if ys[0, si] > ys[0, sj]:
score_diffs = float(scores[0, si] - scores[0, sj])
inner = 1.0 / (1.0 + exp(-1.0 * score_diffs))
delta_ij = abs((1.0 / discounts[abs(i - j)]) -
(1.0 / discounts[abs(i - j) + 1]))
loss_pair = log2(
inner ** (delta_ij * abs(gains[si] - gains[sj])))
expected -= loss_pair
assert loss.item() == approx(expected, abs=1e-7)
def test_lambda_ndcg2_worst():
loss_fn = LambdaNDCGLoss2()
scores = torch.FloatTensor([[4.0, 4.0, 2.0, 0.0, 2.0]])
ys = torch.LongTensor([[0, 0, 1, 2, 1]])
n = torch.LongTensor([5])
loss = loss_fn(scores, ys, n)
sorting = [1, 0, 2, 4, 3]
discounts = [log2(2.0 + i) for i in range(5 + 1)]
max_dcg = (
(2 ** 2.0 - 1.0) / log2(2.0) +
(2 ** 1.0 - 1.0) / log2(3.0) +
(2 ** 1.0 - 1.0) / log2(4.0))
gains = [((2 ** float(ys[0, i])) - 1.0) / max_dcg
for i in range(5)]
# Compute result iteratively
expected = 0.0
for i in range(n[0]):
for j in range(n[0]):
si = sorting[i]
sj = sorting[j]
if ys[0, si] > ys[0, sj]:
score_diffs = float(scores[0, si] - scores[0, sj])
inner = 1.0 / (1.0 + exp(-1.0 * score_diffs))
delta_ij = abs((1.0 / discounts[abs(i - j)]) -
(1.0 / discounts[abs(i - j) + 1]))
loss_pair = log2(
inner ** (delta_ij * abs(gains[si] - gains[sj])))
expected -= loss_pair
assert loss.item() == approx(expected)
def test_lambda_ndcg2_mid():
loss_fn = LambdaNDCGLoss2()
scores = torch.FloatTensor([[0.0, 1.0, 1.5, -2.0, 0.0]])
ys = torch.LongTensor([[0, 0, 1, 2, 1]])
n = torch.LongTensor([4])
loss = loss_fn(scores, ys, n)
sorting = [2, 1, 0, 3, 4]
discounts = [log2(2.0 + i) for i in range(5 + 1)]
max_dcg = (2 ** 2.0 - 1.0) / log2(2.0) + (2 ** 1.0 - 1.0) / log2(3.0)
gains = [((2 ** float(ys[0, i])) - 1.0) / max_dcg
for i in range(5)]
# Compute result iteratively
expected = 0.0
for i in range(n[0]):
for j in range(n[0]):
si = sorting[i]
sj = sorting[j]
if ys[0, si] > ys[0, sj]:
score_diffs = float(scores[0, si] - scores[0, sj])
inner = 1.0 / (1.0 + exp(-1.0 * score_diffs))
delta_ij = abs((1.0 / discounts[abs(i - j)]) -
(1.0 / discounts[abs(i - j) + 1]))
loss_pair = log2(
inner ** (delta_ij * abs(gains[si] - gains[sj])))
expected -= loss_pair
assert loss.item() == approx(expected)
| 32.817439 | 76 | 0.514696 | 1,864 | 12,044 | 3.263949 | 0.046674 | 0.030901 | 0.018245 | 0.04142 | 0.889711 | 0.869329 | 0.852728 | 0.852728 | 0.825608 | 0.818705 | 0 | 0.104219 | 0.305297 | 12,044 | 366 | 77 | 32.907104 | 0.622923 | 0.031302 | 0 | 0.837931 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.075862 | 1 | 0.051724 | false | 0 | 0.027586 | 0 | 0.07931 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1e679ac9490cfb527f357d6838b5846965396feb | 1,559 | py | Python | app/core/migrations/0015_auto_20200106_1608.py | Raysultan/roscosmos-stats | 8931ee824c4e4cd67ae4f86ce221515b00d9e872 | [
"MIT"
] | 5 | 2020-11-24T09:57:36.000Z | 2021-11-17T08:02:29.000Z | app/core/migrations/0015_auto_20200106_1608.py | raisultan/roscosmos-api | 8931ee824c4e4cd67ae4f86ce221515b00d9e872 | [
"MIT"
] | null | null | null | app/core/migrations/0015_auto_20200106_1608.py | raisultan/roscosmos-api | 8931ee824c4e4cd67ae4f86ce221515b00d9e872 | [
"MIT"
] | null | null | null | # Generated by Django 3.0.2 on 2020-01-06 16:08
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0014_auto_20200106_0226'),
]
operations = [
migrations.AddField(
model_name='launchpad',
name='image',
field=models.ImageField(max_length=255, null=True, upload_to='images/'),
),
migrations.AddField(
model_name='launchvehicle',
name='image',
field=models.ImageField(max_length=255, null=True, upload_to='images/'),
),
migrations.AddField(
model_name='orbitalgrouping',
name='image',
field=models.ImageField(max_length=255, null=True, upload_to='images/'),
),
migrations.AddField(
model_name='spacecraft',
name='image',
field=models.ImageField(max_length=255, null=True, upload_to='images/'),
),
migrations.AddField(
model_name='spaceobservatory',
name='image',
field=models.ImageField(max_length=255, null=True, upload_to='images/'),
),
migrations.AddField(
model_name='spacestation',
name='image',
field=models.ImageField(max_length=255, null=True, upload_to='images/'),
),
migrations.AddField(
model_name='spacetug',
name='image',
field=models.ImageField(max_length=255, null=True, upload_to='images/'),
),
]
| 31.816327 | 84 | 0.57152 | 155 | 1,559 | 5.593548 | 0.303226 | 0.145329 | 0.185698 | 0.217993 | 0.703576 | 0.703576 | 0.703576 | 0.703576 | 0.703576 | 0.703576 | 0 | 0.047619 | 0.299551 | 1,559 | 48 | 85 | 32.479167 | 0.746337 | 0.028865 | 0 | 0.666667 | 1 | 0 | 0.128307 | 0.015212 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.02381 | 0 | 0.095238 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
1e863bb1cad91ca839384b19dbe926ec8fdb8270 | 45,651 | py | Python | src/python_pachyderm/proto/admin/v1_9/pfs/pfs_pb2_grpc.py | barretthinson/python-pachyderm | 82cea22d1105d70833a5522ccac750ca521694ff | [
"Apache-2.0"
] | null | null | null | src/python_pachyderm/proto/admin/v1_9/pfs/pfs_pb2_grpc.py | barretthinson/python-pachyderm | 82cea22d1105d70833a5522ccac750ca521694ff | [
"Apache-2.0"
] | null | null | null | src/python_pachyderm/proto/admin/v1_9/pfs/pfs_pb2_grpc.py | barretthinson/python-pachyderm | 82cea22d1105d70833a5522ccac750ca521694ff | [
"Apache-2.0"
] | null | null | null | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from python_pachyderm.proto.admin.v1_9.pfs import pfs_pb2 as client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
class APIStub(object):
# missing associated documentation comment in .proto file
pass
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.CreateRepo = channel.unary_unary(
'/pfs_1_9.API/CreateRepo',
request_serializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.CreateRepoRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.InspectRepo = channel.unary_unary(
'/pfs_1_9.API/InspectRepo',
request_serializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.InspectRepoRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.RepoInfo.FromString,
)
self.ListRepo = channel.unary_unary(
'/pfs_1_9.API/ListRepo',
request_serializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.ListRepoRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.ListRepoResponse.FromString,
)
self.DeleteRepo = channel.unary_unary(
'/pfs_1_9.API/DeleteRepo',
request_serializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.DeleteRepoRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.StartCommit = channel.unary_unary(
'/pfs_1_9.API/StartCommit',
request_serializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.StartCommitRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.Commit.FromString,
)
self.FinishCommit = channel.unary_unary(
'/pfs_1_9.API/FinishCommit',
request_serializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.FinishCommitRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.InspectCommit = channel.unary_unary(
'/pfs_1_9.API/InspectCommit',
request_serializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.InspectCommitRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.CommitInfo.FromString,
)
self.ListCommit = channel.unary_unary(
'/pfs_1_9.API/ListCommit',
request_serializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.ListCommitRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.CommitInfos.FromString,
)
self.ListCommitStream = channel.unary_stream(
'/pfs_1_9.API/ListCommitStream',
request_serializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.ListCommitRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.CommitInfo.FromString,
)
self.DeleteCommit = channel.unary_unary(
'/pfs_1_9.API/DeleteCommit',
request_serializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.DeleteCommitRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.FlushCommit = channel.unary_stream(
'/pfs_1_9.API/FlushCommit',
request_serializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.FlushCommitRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.CommitInfo.FromString,
)
self.SubscribeCommit = channel.unary_stream(
'/pfs_1_9.API/SubscribeCommit',
request_serializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.SubscribeCommitRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.CommitInfo.FromString,
)
self.BuildCommit = channel.unary_unary(
'/pfs_1_9.API/BuildCommit',
request_serializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.BuildCommitRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.Commit.FromString,
)
self.CreateBranch = channel.unary_unary(
'/pfs_1_9.API/CreateBranch',
request_serializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.CreateBranchRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.InspectBranch = channel.unary_unary(
'/pfs_1_9.API/InspectBranch',
request_serializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.InspectBranchRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.BranchInfo.FromString,
)
self.ListBranch = channel.unary_unary(
'/pfs_1_9.API/ListBranch',
request_serializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.ListBranchRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.BranchInfos.FromString,
)
self.DeleteBranch = channel.unary_unary(
'/pfs_1_9.API/DeleteBranch',
request_serializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.DeleteBranchRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.PutFile = channel.stream_unary(
'/pfs_1_9.API/PutFile',
request_serializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.PutFileRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.CopyFile = channel.unary_unary(
'/pfs_1_9.API/CopyFile',
request_serializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.CopyFileRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.GetFile = channel.unary_stream(
'/pfs_1_9.API/GetFile',
request_serializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.GetFileRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_wrappers__pb2.BytesValue.FromString,
)
self.InspectFile = channel.unary_unary(
'/pfs_1_9.API/InspectFile',
request_serializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.InspectFileRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.FileInfo.FromString,
)
self.ListFile = channel.unary_unary(
'/pfs_1_9.API/ListFile',
request_serializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.ListFileRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.FileInfos.FromString,
)
self.ListFileStream = channel.unary_stream(
'/pfs_1_9.API/ListFileStream',
request_serializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.ListFileRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.FileInfo.FromString,
)
self.WalkFile = channel.unary_stream(
'/pfs_1_9.API/WalkFile',
request_serializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.WalkFileRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.FileInfo.FromString,
)
self.GlobFile = channel.unary_unary(
'/pfs_1_9.API/GlobFile',
request_serializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.GlobFileRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.FileInfos.FromString,
)
self.GlobFileStream = channel.unary_stream(
'/pfs_1_9.API/GlobFileStream',
request_serializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.GlobFileRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.FileInfo.FromString,
)
self.DiffFile = channel.unary_unary(
'/pfs_1_9.API/DiffFile',
request_serializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.DiffFileRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.DiffFileResponse.FromString,
)
self.DeleteFile = channel.unary_unary(
'/pfs_1_9.API/DeleteFile',
request_serializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.DeleteFileRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.DeleteAll = channel.unary_unary(
'/pfs_1_9.API/DeleteAll',
request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
self.Fsck = channel.unary_stream(
'/pfs_1_9.API/Fsck',
request_serializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.FsckRequest.SerializeToString,
response_deserializer=client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2.FsckResponse.FromString,
)
class APIServicer(object):
    """Server-side skeleton for the pfs_1_9.API service.

    Every RPC defaults to reporting UNIMPLEMENTED on the call context and
    raising NotImplementedError; concrete services subclass this and
    override the methods they actually support.
    """

    def _unimplemented(self, context):
        # Default behavior shared by every generated handler stub.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def CreateRepo(self, request, context):
        """Repo rpcs: CreateRepo creates a new repo.

        An error is returned if the repo already exists.
        """
        self._unimplemented(context)

    def InspectRepo(self, request, context):
        """InspectRepo returns info about a repo."""
        self._unimplemented(context)

    def ListRepo(self, request, context):
        """ListRepo returns info about all repos."""
        self._unimplemented(context)

    def DeleteRepo(self, request, context):
        """DeleteRepo deletes a repo."""
        self._unimplemented(context)

    def StartCommit(self, request, context):
        """Commit rpcs: StartCommit creates a new write commit from a parent commit."""
        self._unimplemented(context)

    def FinishCommit(self, request, context):
        """FinishCommit turns a write commit into a read commit."""
        self._unimplemented(context)

    def InspectCommit(self, request, context):
        """InspectCommit returns the info about a commit."""
        self._unimplemented(context)

    def ListCommit(self, request, context):
        """ListCommit returns info about all commits.

        Deprecated in favor of ListCommitStream.
        """
        self._unimplemented(context)

    def ListCommitStream(self, request, context):
        """ListCommitStream is like ListCommit, but returns its results in a GRPC stream."""
        self._unimplemented(context)

    def DeleteCommit(self, request, context):
        """DeleteCommit deletes a commit."""
        self._unimplemented(context)

    def FlushCommit(self, request, context):
        """FlushCommit waits for downstream commits to finish."""
        self._unimplemented(context)

    def SubscribeCommit(self, request, context):
        """SubscribeCommit subscribes for new commits on a given branch."""
        self._unimplemented(context)

    def BuildCommit(self, request, context):
        """BuildCommit builds a commit that's backed by the given tree."""
        self._unimplemented(context)

    def CreateBranch(self, request, context):
        """CreateBranch creates a new branch."""
        self._unimplemented(context)

    def InspectBranch(self, request, context):
        """InspectBranch returns info about a branch."""
        self._unimplemented(context)

    def ListBranch(self, request, context):
        """ListBranch returns info about the heads of branches."""
        self._unimplemented(context)

    def DeleteBranch(self, request, context):
        """DeleteBranch deletes a branch; note that the commits still exist."""
        self._unimplemented(context)

    def PutFile(self, request_iterator, context):
        """File rpcs: PutFile writes the specified file to pfs."""
        self._unimplemented(context)

    def CopyFile(self, request, context):
        """CopyFile copies the contents of one file to another."""
        self._unimplemented(context)

    def GetFile(self, request, context):
        """GetFile returns a byte stream of the contents of the file."""
        self._unimplemented(context)

    def InspectFile(self, request, context):
        """InspectFile returns info about a file."""
        self._unimplemented(context)

    def ListFile(self, request, context):
        """ListFile returns info about all files.

        Deprecated in favor of ListFileStream.
        """
        self._unimplemented(context)

    def ListFileStream(self, request, context):
        """ListFileStream is a streaming version of ListFile.

        TODO(msteffen): When the dash has been updated to use ListFileStream,
        replace ListFile with this RPC (https://github.com/pachyderm/dash/issues/201)
        """
        self._unimplemented(context)

    def WalkFile(self, request, context):
        """WalkFile walks over all the files under a directory, including children of children."""
        self._unimplemented(context)

    def GlobFile(self, request, context):
        """GlobFile returns info about all files.

        Deprecated in favor of GlobFileStream.
        """
        self._unimplemented(context)

    def GlobFileStream(self, request, context):
        """GlobFileStream is a streaming version of GlobFile.

        TODO(msteffen): When the dash has been updated to use GlobFileStream,
        replace GlobFile with this RPC (https://github.com/pachyderm/dash/issues/201)
        """
        self._unimplemented(context)

    def DiffFile(self, request, context):
        """DiffFile returns the differences between 2 paths at 2 commits."""
        self._unimplemented(context)

    def DeleteFile(self, request, context):
        """DeleteFile deletes a file."""
        self._unimplemented(context)

    def DeleteAll(self, request, context):
        """DeleteAll deletes everything."""
        self._unimplemented(context)

    def Fsck(self, request, context):
        """Fsck does a file system consistency check for pfs."""
        self._unimplemented(context)
def add_APIServicer_to_server(servicer, server):
    """Wire an APIServicer implementation into *server*.

    Builds one RPC method handler per pfs_1_9.API method and registers
    them all under the service name via a generic handler.
    """
    # Short aliases for the generated protobuf modules and grpc handler
    # factories; purely cosmetic, the registrations are unchanged.
    _pfs = client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2
    _empty = google_dot_protobuf_dot_empty__pb2.Empty
    _bytes_value = google_dot_protobuf_dot_wrappers__pb2.BytesValue
    _unary_unary = grpc.unary_unary_rpc_method_handler
    _unary_stream = grpc.unary_stream_rpc_method_handler
    _stream_unary = grpc.stream_unary_rpc_method_handler
    rpc_method_handlers = {
        'CreateRepo': _unary_unary(
            servicer.CreateRepo,
            request_deserializer=_pfs.CreateRepoRequest.FromString,
            response_serializer=_empty.SerializeToString,
        ),
        'InspectRepo': _unary_unary(
            servicer.InspectRepo,
            request_deserializer=_pfs.InspectRepoRequest.FromString,
            response_serializer=_pfs.RepoInfo.SerializeToString,
        ),
        'ListRepo': _unary_unary(
            servicer.ListRepo,
            request_deserializer=_pfs.ListRepoRequest.FromString,
            response_serializer=_pfs.ListRepoResponse.SerializeToString,
        ),
        'DeleteRepo': _unary_unary(
            servicer.DeleteRepo,
            request_deserializer=_pfs.DeleteRepoRequest.FromString,
            response_serializer=_empty.SerializeToString,
        ),
        'StartCommit': _unary_unary(
            servicer.StartCommit,
            request_deserializer=_pfs.StartCommitRequest.FromString,
            response_serializer=_pfs.Commit.SerializeToString,
        ),
        'FinishCommit': _unary_unary(
            servicer.FinishCommit,
            request_deserializer=_pfs.FinishCommitRequest.FromString,
            response_serializer=_empty.SerializeToString,
        ),
        'InspectCommit': _unary_unary(
            servicer.InspectCommit,
            request_deserializer=_pfs.InspectCommitRequest.FromString,
            response_serializer=_pfs.CommitInfo.SerializeToString,
        ),
        'ListCommit': _unary_unary(
            servicer.ListCommit,
            request_deserializer=_pfs.ListCommitRequest.FromString,
            response_serializer=_pfs.CommitInfos.SerializeToString,
        ),
        'ListCommitStream': _unary_stream(
            servicer.ListCommitStream,
            request_deserializer=_pfs.ListCommitRequest.FromString,
            response_serializer=_pfs.CommitInfo.SerializeToString,
        ),
        'DeleteCommit': _unary_unary(
            servicer.DeleteCommit,
            request_deserializer=_pfs.DeleteCommitRequest.FromString,
            response_serializer=_empty.SerializeToString,
        ),
        'FlushCommit': _unary_stream(
            servicer.FlushCommit,
            request_deserializer=_pfs.FlushCommitRequest.FromString,
            response_serializer=_pfs.CommitInfo.SerializeToString,
        ),
        'SubscribeCommit': _unary_stream(
            servicer.SubscribeCommit,
            request_deserializer=_pfs.SubscribeCommitRequest.FromString,
            response_serializer=_pfs.CommitInfo.SerializeToString,
        ),
        'BuildCommit': _unary_unary(
            servicer.BuildCommit,
            request_deserializer=_pfs.BuildCommitRequest.FromString,
            response_serializer=_pfs.Commit.SerializeToString,
        ),
        'CreateBranch': _unary_unary(
            servicer.CreateBranch,
            request_deserializer=_pfs.CreateBranchRequest.FromString,
            response_serializer=_empty.SerializeToString,
        ),
        'InspectBranch': _unary_unary(
            servicer.InspectBranch,
            request_deserializer=_pfs.InspectBranchRequest.FromString,
            response_serializer=_pfs.BranchInfo.SerializeToString,
        ),
        'ListBranch': _unary_unary(
            servicer.ListBranch,
            request_deserializer=_pfs.ListBranchRequest.FromString,
            response_serializer=_pfs.BranchInfos.SerializeToString,
        ),
        'DeleteBranch': _unary_unary(
            servicer.DeleteBranch,
            request_deserializer=_pfs.DeleteBranchRequest.FromString,
            response_serializer=_empty.SerializeToString,
        ),
        'PutFile': _stream_unary(
            servicer.PutFile,
            request_deserializer=_pfs.PutFileRequest.FromString,
            response_serializer=_empty.SerializeToString,
        ),
        'CopyFile': _unary_unary(
            servicer.CopyFile,
            request_deserializer=_pfs.CopyFileRequest.FromString,
            response_serializer=_empty.SerializeToString,
        ),
        'GetFile': _unary_stream(
            servicer.GetFile,
            request_deserializer=_pfs.GetFileRequest.FromString,
            response_serializer=_bytes_value.SerializeToString,
        ),
        'InspectFile': _unary_unary(
            servicer.InspectFile,
            request_deserializer=_pfs.InspectFileRequest.FromString,
            response_serializer=_pfs.FileInfo.SerializeToString,
        ),
        'ListFile': _unary_unary(
            servicer.ListFile,
            request_deserializer=_pfs.ListFileRequest.FromString,
            response_serializer=_pfs.FileInfos.SerializeToString,
        ),
        'ListFileStream': _unary_stream(
            servicer.ListFileStream,
            request_deserializer=_pfs.ListFileRequest.FromString,
            response_serializer=_pfs.FileInfo.SerializeToString,
        ),
        'WalkFile': _unary_stream(
            servicer.WalkFile,
            request_deserializer=_pfs.WalkFileRequest.FromString,
            response_serializer=_pfs.FileInfo.SerializeToString,
        ),
        'GlobFile': _unary_unary(
            servicer.GlobFile,
            request_deserializer=_pfs.GlobFileRequest.FromString,
            response_serializer=_pfs.FileInfos.SerializeToString,
        ),
        'GlobFileStream': _unary_stream(
            servicer.GlobFileStream,
            request_deserializer=_pfs.GlobFileRequest.FromString,
            response_serializer=_pfs.FileInfo.SerializeToString,
        ),
        'DiffFile': _unary_unary(
            servicer.DiffFile,
            request_deserializer=_pfs.DiffFileRequest.FromString,
            response_serializer=_pfs.DiffFileResponse.SerializeToString,
        ),
        'DeleteFile': _unary_unary(
            servicer.DeleteFile,
            request_deserializer=_pfs.DeleteFileRequest.FromString,
            response_serializer=_empty.SerializeToString,
        ),
        'DeleteAll': _unary_unary(
            servicer.DeleteAll,
            request_deserializer=_empty.FromString,
            response_serializer=_empty.SerializeToString,
        ),
        'Fsck': _unary_stream(
            servicer.Fsck,
            request_deserializer=_pfs.FsckRequest.FromString,
            response_serializer=_pfs.FsckResponse.SerializeToString,
        ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
        'pfs_1_9.API', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
class ObjectAPIStub(object):
    """Client-side stub for the pfs_1_9.ObjectAPI service.

    Each attribute is a callable bound to one RPC on the supplied channel.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
          channel: A grpc.Channel.
        """
        # Aliases keep the generated module names readable below.
        _pfs = client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2
        _empty = google_dot_protobuf_dot_empty__pb2.Empty
        _bytes_value = google_dot_protobuf_dot_wrappers__pb2.BytesValue
        self.PutObject = channel.stream_unary(
            '/pfs_1_9.ObjectAPI/PutObject',
            request_serializer=_pfs.PutObjectRequest.SerializeToString,
            response_deserializer=_pfs.Object.FromString,
        )
        self.PutObjectSplit = channel.stream_unary(
            '/pfs_1_9.ObjectAPI/PutObjectSplit',
            request_serializer=_pfs.PutObjectRequest.SerializeToString,
            response_deserializer=_pfs.Objects.FromString,
        )
        self.PutObjects = channel.stream_unary(
            '/pfs_1_9.ObjectAPI/PutObjects',
            request_serializer=_pfs.PutObjectRequest.SerializeToString,
            response_deserializer=_empty.FromString,
        )
        self.CreateObject = channel.unary_unary(
            '/pfs_1_9.ObjectAPI/CreateObject',
            request_serializer=_pfs.CreateObjectRequest.SerializeToString,
            response_deserializer=_empty.FromString,
        )
        self.GetObject = channel.unary_stream(
            '/pfs_1_9.ObjectAPI/GetObject',
            request_serializer=_pfs.Object.SerializeToString,
            response_deserializer=_bytes_value.FromString,
        )
        self.GetObjects = channel.unary_stream(
            '/pfs_1_9.ObjectAPI/GetObjects',
            request_serializer=_pfs.GetObjectsRequest.SerializeToString,
            response_deserializer=_bytes_value.FromString,
        )
        self.PutBlock = channel.stream_unary(
            '/pfs_1_9.ObjectAPI/PutBlock',
            request_serializer=_pfs.PutBlockRequest.SerializeToString,
            response_deserializer=_empty.FromString,
        )
        self.GetBlock = channel.unary_stream(
            '/pfs_1_9.ObjectAPI/GetBlock',
            request_serializer=_pfs.GetBlockRequest.SerializeToString,
            response_deserializer=_bytes_value.FromString,
        )
        self.GetBlocks = channel.unary_stream(
            '/pfs_1_9.ObjectAPI/GetBlocks',
            request_serializer=_pfs.GetBlocksRequest.SerializeToString,
            response_deserializer=_bytes_value.FromString,
        )
        self.ListBlock = channel.unary_stream(
            '/pfs_1_9.ObjectAPI/ListBlock',
            request_serializer=_pfs.ListBlockRequest.SerializeToString,
            response_deserializer=_pfs.Block.FromString,
        )
        self.TagObject = channel.unary_unary(
            '/pfs_1_9.ObjectAPI/TagObject',
            request_serializer=_pfs.TagObjectRequest.SerializeToString,
            response_deserializer=_empty.FromString,
        )
        self.InspectObject = channel.unary_unary(
            '/pfs_1_9.ObjectAPI/InspectObject',
            request_serializer=_pfs.Object.SerializeToString,
            response_deserializer=_pfs.ObjectInfo.FromString,
        )
        self.CheckObject = channel.unary_unary(
            '/pfs_1_9.ObjectAPI/CheckObject',
            request_serializer=_pfs.CheckObjectRequest.SerializeToString,
            response_deserializer=_pfs.CheckObjectResponse.FromString,
        )
        self.ListObjects = channel.unary_stream(
            '/pfs_1_9.ObjectAPI/ListObjects',
            request_serializer=_pfs.ListObjectsRequest.SerializeToString,
            response_deserializer=_pfs.ObjectInfo.FromString,
        )
        self.DeleteObjects = channel.unary_unary(
            '/pfs_1_9.ObjectAPI/DeleteObjects',
            request_serializer=_pfs.DeleteObjectsRequest.SerializeToString,
            response_deserializer=_pfs.DeleteObjectsResponse.FromString,
        )
        self.GetTag = channel.unary_stream(
            '/pfs_1_9.ObjectAPI/GetTag',
            request_serializer=_pfs.Tag.SerializeToString,
            response_deserializer=_bytes_value.FromString,
        )
        self.InspectTag = channel.unary_unary(
            '/pfs_1_9.ObjectAPI/InspectTag',
            request_serializer=_pfs.Tag.SerializeToString,
            response_deserializer=_pfs.ObjectInfo.FromString,
        )
        self.ListTags = channel.unary_stream(
            '/pfs_1_9.ObjectAPI/ListTags',
            request_serializer=_pfs.ListTagsRequest.SerializeToString,
            response_deserializer=_pfs.ListTagsResponse.FromString,
        )
        self.DeleteTags = channel.unary_unary(
            '/pfs_1_9.ObjectAPI/DeleteTags',
            request_serializer=_pfs.DeleteTagsRequest.SerializeToString,
            response_deserializer=_pfs.DeleteTagsResponse.FromString,
        )
        self.Compact = channel.unary_unary(
            '/pfs_1_9.ObjectAPI/Compact',
            request_serializer=_empty.SerializeToString,
            response_deserializer=_empty.FromString,
        )
class ObjectAPIServicer(object):
    """Server-side skeleton for the pfs_1_9.ObjectAPI service.

    Every RPC defaults to reporting UNIMPLEMENTED on the call context and
    raising NotImplementedError; subclasses override what they support.
    (The .proto file carries no per-method documentation.)
    """

    def _unimplemented(self, context):
        # Default behavior shared by every generated handler stub.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def PutObject(self, request_iterator, context):
        """Override to handle the PutObject RPC."""
        self._unimplemented(context)

    def PutObjectSplit(self, request_iterator, context):
        """Override to handle the PutObjectSplit RPC."""
        self._unimplemented(context)

    def PutObjects(self, request_iterator, context):
        """Override to handle the PutObjects RPC."""
        self._unimplemented(context)

    def CreateObject(self, request, context):
        """Override to handle the CreateObject RPC."""
        self._unimplemented(context)

    def GetObject(self, request, context):
        """Override to handle the GetObject RPC."""
        self._unimplemented(context)

    def GetObjects(self, request, context):
        """Override to handle the GetObjects RPC."""
        self._unimplemented(context)

    def PutBlock(self, request_iterator, context):
        """Override to handle the PutBlock RPC."""
        self._unimplemented(context)

    def GetBlock(self, request, context):
        """Override to handle the GetBlock RPC."""
        self._unimplemented(context)

    def GetBlocks(self, request, context):
        """Override to handle the GetBlocks RPC."""
        self._unimplemented(context)

    def ListBlock(self, request, context):
        """Override to handle the ListBlock RPC."""
        self._unimplemented(context)

    def TagObject(self, request, context):
        """Override to handle the TagObject RPC."""
        self._unimplemented(context)

    def InspectObject(self, request, context):
        """Override to handle the InspectObject RPC."""
        self._unimplemented(context)

    def CheckObject(self, request, context):
        """CheckObject checks if an object exists in the blob store without
        actually reading the object.
        """
        self._unimplemented(context)

    def ListObjects(self, request, context):
        """Override to handle the ListObjects RPC."""
        self._unimplemented(context)

    def DeleteObjects(self, request, context):
        """Override to handle the DeleteObjects RPC."""
        self._unimplemented(context)

    def GetTag(self, request, context):
        """Override to handle the GetTag RPC."""
        self._unimplemented(context)

    def InspectTag(self, request, context):
        """Override to handle the InspectTag RPC."""
        self._unimplemented(context)

    def ListTags(self, request, context):
        """Override to handle the ListTags RPC."""
        self._unimplemented(context)

    def DeleteTags(self, request, context):
        """Override to handle the DeleteTags RPC."""
        self._unimplemented(context)

    def Compact(self, request, context):
        """Override to handle the Compact RPC."""
        self._unimplemented(context)
def add_ObjectAPIServicer_to_server(servicer, server):
    """Wire an ObjectAPIServicer implementation into *server*.

    Builds one RPC method handler per pfs_1_9.ObjectAPI method and
    registers them all under the service name via a generic handler.
    """
    # Short aliases for the generated protobuf modules and grpc handler
    # factories; purely cosmetic, the registrations are unchanged.
    _pfs = client_dot_admin_dot_v1__9_dot_pfs_dot_pfs__pb2
    _empty = google_dot_protobuf_dot_empty__pb2.Empty
    _bytes_value = google_dot_protobuf_dot_wrappers__pb2.BytesValue
    _unary_unary = grpc.unary_unary_rpc_method_handler
    _unary_stream = grpc.unary_stream_rpc_method_handler
    _stream_unary = grpc.stream_unary_rpc_method_handler
    rpc_method_handlers = {
        'PutObject': _stream_unary(
            servicer.PutObject,
            request_deserializer=_pfs.PutObjectRequest.FromString,
            response_serializer=_pfs.Object.SerializeToString,
        ),
        'PutObjectSplit': _stream_unary(
            servicer.PutObjectSplit,
            request_deserializer=_pfs.PutObjectRequest.FromString,
            response_serializer=_pfs.Objects.SerializeToString,
        ),
        'PutObjects': _stream_unary(
            servicer.PutObjects,
            request_deserializer=_pfs.PutObjectRequest.FromString,
            response_serializer=_empty.SerializeToString,
        ),
        'CreateObject': _unary_unary(
            servicer.CreateObject,
            request_deserializer=_pfs.CreateObjectRequest.FromString,
            response_serializer=_empty.SerializeToString,
        ),
        'GetObject': _unary_stream(
            servicer.GetObject,
            request_deserializer=_pfs.Object.FromString,
            response_serializer=_bytes_value.SerializeToString,
        ),
        'GetObjects': _unary_stream(
            servicer.GetObjects,
            request_deserializer=_pfs.GetObjectsRequest.FromString,
            response_serializer=_bytes_value.SerializeToString,
        ),
        'PutBlock': _stream_unary(
            servicer.PutBlock,
            request_deserializer=_pfs.PutBlockRequest.FromString,
            response_serializer=_empty.SerializeToString,
        ),
        'GetBlock': _unary_stream(
            servicer.GetBlock,
            request_deserializer=_pfs.GetBlockRequest.FromString,
            response_serializer=_bytes_value.SerializeToString,
        ),
        'GetBlocks': _unary_stream(
            servicer.GetBlocks,
            request_deserializer=_pfs.GetBlocksRequest.FromString,
            response_serializer=_bytes_value.SerializeToString,
        ),
        'ListBlock': _unary_stream(
            servicer.ListBlock,
            request_deserializer=_pfs.ListBlockRequest.FromString,
            response_serializer=_pfs.Block.SerializeToString,
        ),
        'TagObject': _unary_unary(
            servicer.TagObject,
            request_deserializer=_pfs.TagObjectRequest.FromString,
            response_serializer=_empty.SerializeToString,
        ),
        'InspectObject': _unary_unary(
            servicer.InspectObject,
            request_deserializer=_pfs.Object.FromString,
            response_serializer=_pfs.ObjectInfo.SerializeToString,
        ),
        'CheckObject': _unary_unary(
            servicer.CheckObject,
            request_deserializer=_pfs.CheckObjectRequest.FromString,
            response_serializer=_pfs.CheckObjectResponse.SerializeToString,
        ),
        'ListObjects': _unary_stream(
            servicer.ListObjects,
            request_deserializer=_pfs.ListObjectsRequest.FromString,
            response_serializer=_pfs.ObjectInfo.SerializeToString,
        ),
        'DeleteObjects': _unary_unary(
            servicer.DeleteObjects,
            request_deserializer=_pfs.DeleteObjectsRequest.FromString,
            response_serializer=_pfs.DeleteObjectsResponse.SerializeToString,
        ),
        'GetTag': _unary_stream(
            servicer.GetTag,
            request_deserializer=_pfs.Tag.FromString,
            response_serializer=_bytes_value.SerializeToString,
        ),
        'InspectTag': _unary_unary(
            servicer.InspectTag,
            request_deserializer=_pfs.Tag.FromString,
            response_serializer=_pfs.ObjectInfo.SerializeToString,
        ),
        'ListTags': _unary_stream(
            servicer.ListTags,
            request_deserializer=_pfs.ListTagsRequest.FromString,
            response_serializer=_pfs.ListTagsResponse.SerializeToString,
        ),
        'DeleteTags': _unary_unary(
            servicer.DeleteTags,
            request_deserializer=_pfs.DeleteTagsRequest.FromString,
            response_serializer=_pfs.DeleteTagsResponse.SerializeToString,
        ),
        'Compact': _unary_unary(
            servicer.Compact,
            request_deserializer=_empty.FromString,
            response_serializer=_empty.SerializeToString,
        ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
        'pfs_1_9.ObjectAPI', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
| 49.674646 | 118 | 0.774222 | 5,485 | 45,651 | 5.924157 | 0.051413 | 0.057241 | 0.066782 | 0.081092 | 0.845848 | 0.842986 | 0.839386 | 0.735859 | 0.735859 | 0.735859 | 0 | 0.016332 | 0.155002 | 45,651 | 918 | 119 | 49.728758 | 0.826027 | 0.079188 | 0 | 0.44698 | 1 | 0 | 0.098639 | 0.029285 | 0 | 0 | 0 | 0.002179 | 0 | 1 | 0.072483 | false | 0.030872 | 0.005369 | 0 | 0.083221 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1ec140b76b9d4f76d4c6124db80ba31a09311433 | 7,205 | py | Python | tests/pyne_test_collector_test.py | Avvir/pyne | 864885a8fb632b72c00af164f150b1daa38a346f | [
"MIT"
] | 4 | 2018-08-10T20:05:10.000Z | 2019-07-24T15:29:32.000Z | tests/pyne_test_collector_test.py | Avvir/pyne | 864885a8fb632b72c00af164f150b1daa38a346f | [
"MIT"
] | 6 | 2018-09-25T20:15:51.000Z | 2021-12-22T17:09:52.000Z | tests/pyne_test_collector_test.py | Avvir/pyne | 864885a8fb632b72c00af164f150b1daa38a346f | [
"MIT"
] | null | null | null | from pynetest.lib.pyne_test_blocks import DescribeBlock
from pynetest.pyne_test_collector import test_collection, it, describe, before_each, fit, fdescribe, after_each
from pynetest.expectations import expect
def test__it__adds_it_block_to_current_describe():
    """`it` should append an it-block wrapping the method to the current describe."""
    root = DescribeBlock(None, None, None)
    test_collection.current_describe = root

    def block_body():
        pass

    it(block_body)

    registered = root.it_blocks
    expect(registered).to_have_length(1)
    expect(registered[0].method).to_be(block_body)
def test__it__when_using_string_description__adds_it_block_to_describe():
    """`it("...")` used as a decorator factory still registers the it-block."""
    root = DescribeBlock(None, None, None)
    test_collection.current_describe = root

    def block_body():
        pass

    it("some it name")(block_body)

    registered = root.it_blocks
    expect(registered).to_have_length(1)
    expect(registered[0].method).to_be(block_body)
def test__it__when_using_string_description__sets_the_description():
    """The string passed to `it` becomes the it-block's description."""
    root = DescribeBlock(None, None, None)
    test_collection.current_describe = root

    def block_body():
        pass

    it("some cool thing happens")(block_body)

    expect(root.it_blocks[0].description).to_be("some cool thing happens")
def test__fit__adds_an_it_block_to_current_describe():
    """`fit` registers an it-block on the current describe just like `it`."""
    root = DescribeBlock(None, None, None)
    test_collection.current_describe = root

    def block_body():
        pass

    fit(block_body)

    registered = root.it_blocks
    expect(registered).to_have_length(1)
    expect(registered[0].method).to_be(block_body)
def test__fit__flags_the_it_block_as_focused():
    """An it-block created via `fit` is marked focused."""
    root = DescribeBlock(None, None, None)
    test_collection.current_describe = root

    def block_body():
        pass

    fit(block_body)

    expect(root.it_blocks[0].focused).to_be(True)
def test__fit__flags_ancestors_as_having_focused_descendant():
    """`fit` propagates the focused-descendant flag up the whole describe chain."""
    outer = DescribeBlock(None, None, None)
    middle = DescribeBlock(outer, None, None)
    inner = DescribeBlock(middle, None, None)
    test_collection.current_describe = inner

    def block_body():
        pass

    fit(block_body)

    for ancestor in (inner, middle, outer):
        expect(ancestor.has_focused_descendants).to_be(True)
def test__fdescribe__flags_ancestors_as_having_focused_descendant():
    """`fdescribe` propagates the focused-descendant flag up the whole describe chain."""
    outer = DescribeBlock(None, None, None)
    middle = DescribeBlock(outer, None, None)
    inner = DescribeBlock(middle, None, None)
    test_collection.current_describe = inner

    def block_body():
        pass

    fdescribe("some context")(block_body)

    for ancestor in (inner, middle, outer):
        expect(ancestor.has_focused_descendants).to_be(True)
def test__fdescribe__adds_a_describe_block_to_current_describe():
    """`fdescribe("...")` registers a child describe-block on the current describe."""
    root = DescribeBlock(None, None, None)
    test_collection.current_describe = root

    def block_body():
        pass

    fdescribe("some context")(block_body)

    children = root.describe_blocks
    expect(children).to_have_length(1)
    expect(children[0].method).to_be(block_body)
def test__fdescribe__flags_the_it_block_as_focused():
    """A describe-block created via `fdescribe` is marked focused."""
    root = DescribeBlock(None, None, None)
    test_collection.current_describe = root

    def block_body():
        pass

    fdescribe("some context")(block_body)

    expect(root.describe_blocks[0].focused).to_be(True)
def test__describe__adds_describe_block_to_current_describe():
    """`describe` applied directly to a function registers a describe-block."""
    root = DescribeBlock(None, None, None)
    test_collection.current_describe = root

    def block_body():
        pass

    describe(block_body)

    children = root.describe_blocks
    expect(children).to_have_length(1)
    expect(children[0].method).to_be(block_body)
def test__describe__when_using_string_description__adds_describe_block_to_current_describe():
    """`describe("...")` used as a decorator factory still registers the block."""
    root = DescribeBlock(None, None, None)
    test_collection.current_describe = root

    def block_body():
        pass

    describe("some context")(block_body)

    children = root.describe_blocks
    expect(children).to_have_length(1)
    expect(children[0].method).to_be(block_body)
def test__describe__when_using_string_description__sets_description():
    """The string passed to `describe` becomes the describe-block's description."""
    root = DescribeBlock(None, None, None)
    test_collection.current_describe = root

    def block_body():
        pass

    describe("some awesome description")(block_body)

    expect(root.describe_blocks[0].description).to_be("some awesome description")
def test__before_each__adds_before_each_block_to_current_describe():
    """`before_each` registers a setup block on the current describe."""
    root = DescribeBlock(None, None, None)
    test_collection.current_describe = root

    def block_body():
        pass

    before_each(block_body)

    setups = root.before_each_blocks
    expect(setups).to_have_length(1)
    expect(setups[0].method).to_be(block_body)
def test__after_each__adds_after_each_block_to_current_describe():
    """`after_each` registers a teardown block carrying a fixed description."""
    root = DescribeBlock(None, None, None)
    test_collection.current_describe = root

    def block_body():
        pass

    after_each(block_body)

    teardowns = root.after_each_blocks
    expect(teardowns).to_have_length(1)
    expect(teardowns[0].method).to_be(block_body)
    expect(teardowns[0].description).to_be("@after_each")
def test__collect_describe__adds_children_to_the_describe():
    """Collecting a describe runs its body and gathers all declared children."""
    def define_suite():
        @describe
        def when_something_happens():
            pass

        @it
        def does_something():
            pass

        @before_each
        def do():
            pass

    root = DescribeBlock(None, None, define_suite)
    test_collection.collect_describe(root)

    expect(root.before_each_blocks).to_have_length(1)
    expect(root.describe_blocks).to_have_length(1)
    expect(root.it_blocks).to_have_length(1)
def test__collect_describe__when_there_are_nested_describes__collects_them():
    """Collection recurses into nested describes and gathers their children."""
    def define_suite():
        @describe
        def when_something_happens():
            @before_each
            def do():
                pass

            @it
            def does_something():
                pass

            @describe
            def when_something_is_true():
                pass

    root = DescribeBlock(None, None, define_suite)
    test_collection.collect_describe(root)

    nested = root.describe_blocks[0]
    expect(nested.before_each_blocks).to_have_length(1)
    expect(nested.describe_blocks).to_have_length(1)
    expect(nested.it_blocks).to_have_length(1)
| 31.600877 | 111 | 0.762665 | 917 | 7,205 | 5.522356 | 0.076336 | 0.21327 | 0.095379 | 0.124408 | 0.897314 | 0.886058 | 0.848144 | 0.846959 | 0.798578 | 0.745656 | 0 | 0.004941 | 0.157252 | 7,205 | 227 | 112 | 31.740088 | 0.829051 | 0 | 0 | 0.717105 | 0 | 0 | 0.022901 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.256579 | false | 0.131579 | 0.019737 | 0 | 0.276316 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 10 |
1edd54bae79c34c70b06d80ecc88e46f918da744 | 100 | py | Python | recstudio/model/retriever.py | ustc-recsys/Torchrec | 4d62ee42018c12961850936cfd8f4f8d3c6a8dbc | [
"MIT"
] | 1 | 2021-11-13T12:12:54.000Z | 2021-11-13T12:12:54.000Z | recstudio/model/retriever.py | ustc-recsys/Torchrec | 4d62ee42018c12961850936cfd8f4f8d3c6a8dbc | [
"MIT"
] | null | null | null | recstudio/model/retriever.py | ustc-recsys/Torchrec | 4d62ee42018c12961850936cfd8f4f8d3c6a8dbc | [
"MIT"
] | null | null | null | from recstudio.model.mf import *
from recstudio.model.seq import *
from recstudio.model.ae import *
| 25 | 33 | 0.79 | 15 | 100 | 5.266667 | 0.466667 | 0.493671 | 0.683544 | 0.607595 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.12 | 100 | 3 | 34 | 33.333333 | 0.897727 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
94eb3b7318c83257e60668054b9cb0378fa7bee5 | 121 | py | Python | vilmedic/networks/__init__.py | jbdel/vilmedic | 17d462a540a2632811cc2a78edd2861800a33b07 | [
"MIT"
] | 15 | 2021-07-24T10:41:07.000Z | 2022-03-27T14:40:47.000Z | vilmedic/networks/__init__.py | jbdel/vilmedic | 17d462a540a2632811cc2a78edd2861800a33b07 | [
"MIT"
] | null | null | null | vilmedic/networks/__init__.py | jbdel/vilmedic | 17d462a540a2632811cc2a78edd2861800a33b07 | [
"MIT"
] | 2 | 2022-02-22T17:37:22.000Z | 2022-03-20T12:55:40.000Z | from vilmedic.networks.blocks.huggingface import *
from vilmedic.networks.blocks.vision import CNN
from .models import *
| 30.25 | 50 | 0.826446 | 16 | 121 | 6.25 | 0.5625 | 0.24 | 0.4 | 0.52 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.099174 | 121 | 3 | 51 | 40.333333 | 0.917431 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
94f09912238db47bb129154c53b15cddb3f5c828 | 11,442 | py | Python | python/src/toit/api/pubsub/subscribe_pb2_grpc.py | toitware/ap | 4b72d7ed43efe6b7e79bee1bfb5a9fc81fa16edb | [
"MIT"
] | 7 | 2020-03-20T14:10:53.000Z | 2021-11-28T04:05:24.000Z | python/src/toit/api/pubsub/subscribe_pb2_grpc.py | toitware/ap | 4b72d7ed43efe6b7e79bee1bfb5a9fc81fa16edb | [
"MIT"
] | 9 | 2020-03-19T06:54:17.000Z | 2022-03-17T05:07:00.000Z | python/src/toit/api/pubsub/subscribe_pb2_grpc.py | toitware/ap | 4b72d7ed43efe6b7e79bee1bfb5a9fc81fa16edb | [
"MIT"
] | 1 | 2021-08-15T16:31:07.000Z | 2021-08-15T16:31:07.000Z | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from toit.api.pubsub import subscribe_pb2 as toit_dot_api_dot_pubsub_dot_subscribe__pb2
class SubscribeStub(object):
    """Client-side stub for the toit.api.pubsub.Subscribe gRPC service.

    Auto-generated by the gRPC Python protocol compiler plugin — do not edit.
    Each attribute is a callable bound to one RPC method, with request
    serialization and response deserialization wired to the generated
    protobuf message classes.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # All methods are request/response (unary_unary) except Stream,
        # which is server-streaming (unary_stream).
        self.CreateSubscription = channel.unary_unary(
                '/toit.api.pubsub.Subscribe/CreateSubscription',
                request_serializer=toit_dot_api_dot_pubsub_dot_subscribe__pb2.CreateSubscriptionRequest.SerializeToString,
                response_deserializer=toit_dot_api_dot_pubsub_dot_subscribe__pb2.CreateSubscriptionResponse.FromString,
                )
        self.DeleteSubscription = channel.unary_unary(
                '/toit.api.pubsub.Subscribe/DeleteSubscription',
                request_serializer=toit_dot_api_dot_pubsub_dot_subscribe__pb2.DeleteSubscriptionRequest.SerializeToString,
                response_deserializer=toit_dot_api_dot_pubsub_dot_subscribe__pb2.DeleteSubscriptionResponse.FromString,
                )
        self.ListSubscriptions = channel.unary_unary(
                '/toit.api.pubsub.Subscribe/ListSubscriptions',
                request_serializer=toit_dot_api_dot_pubsub_dot_subscribe__pb2.ListSubscriptionsRequest.SerializeToString,
                response_deserializer=toit_dot_api_dot_pubsub_dot_subscribe__pb2.ListSubscriptionsResponse.FromString,
                )
        self.Fetch = channel.unary_unary(
                '/toit.api.pubsub.Subscribe/Fetch',
                request_serializer=toit_dot_api_dot_pubsub_dot_subscribe__pb2.FetchRequest.SerializeToString,
                response_deserializer=toit_dot_api_dot_pubsub_dot_subscribe__pb2.FetchResponse.FromString,
                )
        self.Stream = channel.unary_stream(
                '/toit.api.pubsub.Subscribe/Stream',
                request_serializer=toit_dot_api_dot_pubsub_dot_subscribe__pb2.StreamRequest.SerializeToString,
                response_deserializer=toit_dot_api_dot_pubsub_dot_subscribe__pb2.StreamResponse.FromString,
                )
        self.Acknowledge = channel.unary_unary(
                '/toit.api.pubsub.Subscribe/Acknowledge',
                request_serializer=toit_dot_api_dot_pubsub_dot_subscribe__pb2.AcknowledgeRequest.SerializeToString,
                response_deserializer=toit_dot_api_dot_pubsub_dot_subscribe__pb2.AcknowledgeResponse.FromString,
                )
class SubscribeServicer(object):
    """Server-side base class for the toit.api.pubsub.Subscribe service.

    Auto-generated by the gRPC Python protocol compiler plugin — do not edit.
    Every handler below answers UNIMPLEMENTED; subclass and override the
    methods you support.
    """

    def CreateSubscription(self, request, context):
        """Handler stub for the CreateSubscription RPC; override in a subclass."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def DeleteSubscription(self, request, context):
        """Handler stub for the DeleteSubscription RPC; override in a subclass."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def ListSubscriptions(self, request, context):
        """Handler stub for the ListSubscriptions RPC; override in a subclass."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Fetch(self, request, context):
        """Handler stub for the Fetch RPC; override in a subclass."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Stream(self, request, context):
        """Handler stub for the Stream RPC (server-streaming); override in a subclass."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Acknowledge(self, request, context):
        """Handler stub for the Acknowledge RPC; override in a subclass."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_SubscribeServicer_to_server(servicer, server):
    """Register *servicer*'s handlers for the toit.api.pubsub.Subscribe
    service on *server*.

    Auto-generated by the gRPC Python protocol compiler plugin — do not edit.
    """
    # Map each RPC name to a handler wired to the servicer method and the
    # generated protobuf (de)serializers; Stream is server-streaming, all
    # other methods are unary request/response.
    rpc_method_handlers = {
            'CreateSubscription': grpc.unary_unary_rpc_method_handler(
                    servicer.CreateSubscription,
                    request_deserializer=toit_dot_api_dot_pubsub_dot_subscribe__pb2.CreateSubscriptionRequest.FromString,
                    response_serializer=toit_dot_api_dot_pubsub_dot_subscribe__pb2.CreateSubscriptionResponse.SerializeToString,
            ),
            'DeleteSubscription': grpc.unary_unary_rpc_method_handler(
                    servicer.DeleteSubscription,
                    request_deserializer=toit_dot_api_dot_pubsub_dot_subscribe__pb2.DeleteSubscriptionRequest.FromString,
                    response_serializer=toit_dot_api_dot_pubsub_dot_subscribe__pb2.DeleteSubscriptionResponse.SerializeToString,
            ),
            'ListSubscriptions': grpc.unary_unary_rpc_method_handler(
                    servicer.ListSubscriptions,
                    request_deserializer=toit_dot_api_dot_pubsub_dot_subscribe__pb2.ListSubscriptionsRequest.FromString,
                    response_serializer=toit_dot_api_dot_pubsub_dot_subscribe__pb2.ListSubscriptionsResponse.SerializeToString,
            ),
            'Fetch': grpc.unary_unary_rpc_method_handler(
                    servicer.Fetch,
                    request_deserializer=toit_dot_api_dot_pubsub_dot_subscribe__pb2.FetchRequest.FromString,
                    response_serializer=toit_dot_api_dot_pubsub_dot_subscribe__pb2.FetchResponse.SerializeToString,
            ),
            'Stream': grpc.unary_stream_rpc_method_handler(
                    servicer.Stream,
                    request_deserializer=toit_dot_api_dot_pubsub_dot_subscribe__pb2.StreamRequest.FromString,
                    response_serializer=toit_dot_api_dot_pubsub_dot_subscribe__pb2.StreamResponse.SerializeToString,
            ),
            'Acknowledge': grpc.unary_unary_rpc_method_handler(
                    servicer.Acknowledge,
                    request_deserializer=toit_dot_api_dot_pubsub_dot_subscribe__pb2.AcknowledgeRequest.FromString,
                    response_serializer=toit_dot_api_dot_pubsub_dot_subscribe__pb2.AcknowledgeResponse.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'toit.api.pubsub.Subscribe', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class Subscribe(object):
    """Module-level convenience API for the toit.api.pubsub.Subscribe
    service: each static method performs a one-shot RPC against *target*
    without requiring the caller to build a channel/stub first.

    Auto-generated by the gRPC Python protocol compiler plugin — do not edit.
    """

    @staticmethod
    def CreateSubscription(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke the CreateSubscription RPC once against *target*."""
        return grpc.experimental.unary_unary(request, target, '/toit.api.pubsub.Subscribe/CreateSubscription',
            toit_dot_api_dot_pubsub_dot_subscribe__pb2.CreateSubscriptionRequest.SerializeToString,
            toit_dot_api_dot_pubsub_dot_subscribe__pb2.CreateSubscriptionResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def DeleteSubscription(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke the DeleteSubscription RPC once against *target*."""
        return grpc.experimental.unary_unary(request, target, '/toit.api.pubsub.Subscribe/DeleteSubscription',
            toit_dot_api_dot_pubsub_dot_subscribe__pb2.DeleteSubscriptionRequest.SerializeToString,
            toit_dot_api_dot_pubsub_dot_subscribe__pb2.DeleteSubscriptionResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def ListSubscriptions(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke the ListSubscriptions RPC once against *target*."""
        return grpc.experimental.unary_unary(request, target, '/toit.api.pubsub.Subscribe/ListSubscriptions',
            toit_dot_api_dot_pubsub_dot_subscribe__pb2.ListSubscriptionsRequest.SerializeToString,
            toit_dot_api_dot_pubsub_dot_subscribe__pb2.ListSubscriptionsResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Fetch(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke the Fetch RPC once against *target*."""
        return grpc.experimental.unary_unary(request, target, '/toit.api.pubsub.Subscribe/Fetch',
            toit_dot_api_dot_pubsub_dot_subscribe__pb2.FetchRequest.SerializeToString,
            toit_dot_api_dot_pubsub_dot_subscribe__pb2.FetchResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Stream(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke the Stream RPC once against *target* (server-streaming)."""
        return grpc.experimental.unary_stream(request, target, '/toit.api.pubsub.Subscribe/Stream',
            toit_dot_api_dot_pubsub_dot_subscribe__pb2.StreamRequest.SerializeToString,
            toit_dot_api_dot_pubsub_dot_subscribe__pb2.StreamResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Acknowledge(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke the Acknowledge RPC once against *target*."""
        return grpc.experimental.unary_unary(request, target, '/toit.api.pubsub.Subscribe/Acknowledge',
            toit_dot_api_dot_pubsub_dot_subscribe__pb2.AcknowledgeRequest.SerializeToString,
            toit_dot_api_dot_pubsub_dot_subscribe__pb2.AcknowledgeResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
a222db6d206034f09380f6e103429cdb39419d4f | 2,537 | py | Python | kpi/visiting_management/migrations/0012_visiting_created_visiting_created_by_and_more.py | UniversitaDellaCalabria/kpiManagement | d045a464298e17f50e005b89ba3b71e53d57f368 | [
"Apache-2.0"
] | null | null | null | kpi/visiting_management/migrations/0012_visiting_created_visiting_created_by_and_more.py | UniversitaDellaCalabria/kpiManagement | d045a464298e17f50e005b89ba3b71e53d57f368 | [
"Apache-2.0"
] | null | null | null | kpi/visiting_management/migrations/0012_visiting_created_visiting_created_by_and_more.py | UniversitaDellaCalabria/kpiManagement | d045a464298e17f50e005b89ba3b71e53d57f368 | [
"Apache-2.0"
] | 1 | 2022-03-28T10:48:38.000Z | 2022-03-28T10:48:38.000Z | # Generated by Django 4.0.3 on 2022-04-07 10:59
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Add audit-trail fields to ``Visiting`` and ``VisitingCollaboration``:
    ``created``/``modified`` timestamps plus ``created_by``/``modified_by``
    user foreign keys.

    Auto-generated by Django 4.0.3 ``makemigrations`` — edit with care.
    """

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('visiting_management', '0011_visiting_is_active'),
    ]

    operations = [
        # `created` uses a one-off default (timezone.now) so existing rows
        # can be backfilled; preserve_default=False drops the default after
        # the migration runs.
        migrations.AddField(
            model_name='visiting',
            name='created',
            field=models.DateTimeField(
                auto_now_add=True, default=django.utils.timezone.now),
            preserve_default=False,
        ),
        # SET_NULL keeps the record when the referenced user is deleted.
        migrations.AddField(
            model_name='visiting',
            name='created_by',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL,
                                    related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='visiting',
            name='modified',
            field=models.DateTimeField(auto_now=True),
        ),
        migrations.AddField(
            model_name='visiting',
            name='modified_by',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL,
                                    related_name='%(class)s_modified_by', to=settings.AUTH_USER_MODEL),
        ),
        # Same four audit fields for VisitingCollaboration.
        migrations.AddField(
            model_name='visitingcollaboration',
            name='created',
            field=models.DateTimeField(
                auto_now_add=True, default=django.utils.timezone.now),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='visitingcollaboration',
            name='created_by',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL,
                                    related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='visitingcollaboration',
            name='modified',
            field=models.DateTimeField(auto_now=True),
        ),
        migrations.AddField(
            model_name='visitingcollaboration',
            name='modified_by',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL,
                                    related_name='%(class)s_modified_by', to=settings.AUTH_USER_MODEL),
        ),
    ]
bf3f7f571e17d799815eeede40cc155940cc0015 | 28,987 | py | Python | lib/backup_service_client/api/active_repository_api.py | sumedhpb/TAF | fc6f4cb8dc0b8234393f2e52a7b4a1aa723d9449 | [
"Apache-2.0"
] | 9 | 2019-02-19T05:55:00.000Z | 2022-01-20T10:37:28.000Z | lib/backup_service_client/api/active_repository_api.py | sumedhpb/TAF | fc6f4cb8dc0b8234393f2e52a7b4a1aa723d9449 | [
"Apache-2.0"
] | 2 | 2019-02-19T07:28:54.000Z | 2019-06-18T11:22:29.000Z | lib/backup_service_client/api/active_repository_api.py | sumedhpb/TAF | fc6f4cb8dc0b8234393f2e52a7b4a1aa723d9449 | [
"Apache-2.0"
] | 155 | 2018-11-13T14:57:07.000Z | 2022-03-28T11:53:22.000Z | # coding: utf-8
"""
Couchbase Backup Service API
This is REST API allows users to remotely schedule and run backups, restores and merges as well as to explore various archives for all there Couchbase Clusters. # noqa: E501
OpenAPI spec version: 0.1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from backup_service_client.api_client import ApiClient
class ActiveRepositoryApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
    def cluster_self_repository_active_id_archive_post(self, id, **kwargs):  # noqa: E501
        """Archives the repository  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.cluster_self_repository_active_id_archive_post(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: The repository ID. (required)
        :param ArchiveRequest body:
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # This convenience wrapper returns only the response body; use the
        # *_with_http_info variant to also get status code and headers.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            # Asynchronous mode: return the request thread immediately.
            return self.cluster_self_repository_active_id_archive_post_with_http_info(id, **kwargs)  # noqa: E501
        else:
            (data) = self.cluster_self_repository_active_id_archive_post_with_http_info(id, **kwargs)  # noqa: E501
            return data
    def cluster_self_repository_active_id_archive_post_with_http_info(self, id, **kwargs):  # noqa: E501
        """Archives the repository  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.cluster_self_repository_active_id_archive_post_with_http_info(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: The repository ID. (required)
        :param ArchiveRequest body:
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['id', 'body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument that is not a recognised parameter.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method cluster_self_repository_active_id_archive_post" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `cluster_self_repository_active_id_archive_post`")  # noqa: E501

        collection_formats = {}

        # `id` is substituted into the {id} placeholder of the URL path.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/cluster/self/repository/active/{id}/archive', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
    def cluster_self_repository_active_id_backup_post(self, id, **kwargs):  # noqa: E501
        """Triggers a one off backup for the specified repository  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.cluster_self_repository_active_id_backup_post(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: The repository ID. (required)
        :param BackupRequest body:
        :return: OneOffTaskResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # This convenience wrapper returns only the response body; use the
        # *_with_http_info variant to also get status code and headers.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            # Asynchronous mode: return the request thread immediately.
            return self.cluster_self_repository_active_id_backup_post_with_http_info(id, **kwargs)  # noqa: E501
        else:
            (data) = self.cluster_self_repository_active_id_backup_post_with_http_info(id, **kwargs)  # noqa: E501
            return data
    def cluster_self_repository_active_id_backup_post_with_http_info(self, id, **kwargs):  # noqa: E501
        """Triggers a one off backup for the specified repository  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.cluster_self_repository_active_id_backup_post_with_http_info(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: The repository ID. (required)
        :param BackupRequest body:
        :return: OneOffTaskResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['id', 'body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        # Reject any keyword argument that is not a recognised parameter.
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method cluster_self_repository_active_id_backup_post" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `cluster_self_repository_active_id_backup_post`")  # noqa: E501

        collection_formats = {}

        # `id` is substituted into the {id} placeholder of the URL path.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/cluster/self/repository/active/{id}/backup', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='OneOffTaskResponse',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
    def cluster_self_repository_active_id_backups_backup_name_delete(self, id, backup_name, **kwargs):  # noqa: E501
        """Deletes the backup  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.cluster_self_repository_active_id_backups_backup_name_delete(id, backup_name, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: The repository ID. (required)
        :param str backup_name: The backup to be deleted (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # This convenience wrapper returns only the response body; use the
        # *_with_http_info variant to also get status code and headers.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            # Asynchronous mode: return the request thread immediately.
            return self.cluster_self_repository_active_id_backups_backup_name_delete_with_http_info(id, backup_name, **kwargs)  # noqa: E501
        else:
            (data) = self.cluster_self_repository_active_id_backups_backup_name_delete_with_http_info(id, backup_name, **kwargs)  # noqa: E501
            return data
def cluster_self_repository_active_id_backups_backup_name_delete_with_http_info(self, id, backup_name, **kwargs):  # noqa: E501
    """Deletes the backup  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.cluster_self_repository_active_id_backups_backup_name_delete_with_http_info(id, backup_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: The repository ID. (required)
    :param str backup_name: The backup to be deleted (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    :raises ValueError: if ``id`` or ``backup_name`` is None.
    """
    # Keyword arguments accepted in addition to the positional parameters.
    accepted = ('id', 'backup_name', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    params = {'id': id, 'backup_name': backup_name}
    # Plain dict iteration replaces the former six.iteritems py2 shim;
    # unknown keywords are rejected before any work is done.
    for key, val in kwargs.items():
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method cluster_self_repository_active_id_backups_backup_name_delete" % key
            )
        params[key] = val
    # verify the required parameters are set
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `cluster_self_repository_active_id_backups_backup_name_delete`")  # noqa: E501
    if params['backup_name'] is None:
        raise ValueError("Missing the required parameter `backup_name` when calling `cluster_self_repository_active_id_backups_backup_name_delete`")  # noqa: E501

    # Both values are substituted into the URL template below.
    path_params = {'id': params['id'], 'backupName': params['backup_name']}
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/cluster/self/repository/active/{id}/backups/{backupName}', 'DELETE',
        path_params,
        [],  # query params
        header_params,
        body=None,
        post_params=[],  # form params
        files={},  # no file uploads
        response_type=None,  # noqa: E501
        auth_settings=[],  # no authentication configured
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def cluster_self_repository_active_id_merge_post(self, id, **kwargs):  # noqa: E501
    """Triggers a one off merge for the specified repository  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.cluster_self_repository_active_id_merge_post(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: The repository ID. (required)
    :param MergeRequest body:
    :return: OneOffTaskResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps the payload from the full
    # (data, status, headers) response of the *_with_http_info variant.
    kwargs['_return_http_data_only'] = True
    delegate = self.cluster_self_repository_active_id_merge_post_with_http_info
    # Async callers get the request thread back, sync callers the data --
    # in both cases the delegate's return value is forwarded unchanged.
    return delegate(id, **kwargs)
def cluster_self_repository_active_id_merge_post_with_http_info(self, id, **kwargs):  # noqa: E501
    """Triggers a one off merge for the specified repository  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.cluster_self_repository_active_id_merge_post_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: The repository ID. (required)
    :param MergeRequest body:
    :return: OneOffTaskResponse
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    :raises ValueError: if ``id`` is None.
    """
    # Keyword arguments accepted in addition to the positional parameter.
    accepted = ('id', 'body', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    params = {'id': id}
    # Plain dict iteration replaces the former six.iteritems py2 shim;
    # unknown keywords are rejected before any work is done.
    for key, val in kwargs.items():
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method cluster_self_repository_active_id_merge_post" % key
            )
        params[key] = val
    # verify the required parameter 'id' is set
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `cluster_self_repository_active_id_merge_post`")  # noqa: E501

    path_params = {'id': params['id']}
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/cluster/self/repository/active/{id}/merge', 'POST',
        path_params,
        [],  # query params
        header_params,
        body=params.get('body'),  # optional MergeRequest payload
        post_params=[],  # form params
        files={},  # no file uploads
        response_type='OneOffTaskResponse',  # noqa: E501
        auth_settings=[],  # no authentication configured
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def cluster_self_repository_active_id_pause_post(self, id, **kwargs):  # noqa: E501
    """Pauses the active repository  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.cluster_self_repository_active_id_pause_post(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: The repository ID. (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps the payload from the full
    # (data, status, headers) response of the *_with_http_info variant.
    kwargs['_return_http_data_only'] = True
    delegate = self.cluster_self_repository_active_id_pause_post_with_http_info
    # Async callers get the request thread back, sync callers the data --
    # in both cases the delegate's return value is forwarded unchanged.
    return delegate(id, **kwargs)
def cluster_self_repository_active_id_pause_post_with_http_info(self, id, **kwargs):  # noqa: E501
    """Pauses the active repository  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.cluster_self_repository_active_id_pause_post_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: The repository ID. (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    :raises ValueError: if ``id`` is None.
    """
    # Keyword arguments accepted in addition to the positional parameter.
    accepted = ('id', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    params = {'id': id}
    # Plain dict iteration replaces the former six.iteritems py2 shim;
    # unknown keywords are rejected before any work is done.
    for key, val in kwargs.items():
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method cluster_self_repository_active_id_pause_post" % key
            )
        params[key] = val
    # verify the required parameter 'id' is set
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `cluster_self_repository_active_id_pause_post`")  # noqa: E501

    path_params = {'id': params['id']}
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/cluster/self/repository/active/{id}/pause', 'POST',
        path_params,
        [],  # query params
        header_params,
        body=None,
        post_params=[],  # form params
        files={},  # no file uploads
        response_type=None,  # noqa: E501
        auth_settings=[],  # no authentication configured
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def cluster_self_repository_active_id_post(self, id, **kwargs):  # noqa: E501
    """Creates an active repository  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.cluster_self_repository_active_id_post(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: The repository ID. (required)
    :param CreateActiveRepositoryRequest body:
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps the payload from the full
    # (data, status, headers) response of the *_with_http_info variant.
    kwargs['_return_http_data_only'] = True
    delegate = self.cluster_self_repository_active_id_post_with_http_info
    # Async callers get the request thread back, sync callers the data --
    # in both cases the delegate's return value is forwarded unchanged.
    return delegate(id, **kwargs)
def cluster_self_repository_active_id_post_with_http_info(self, id, **kwargs):  # noqa: E501
    """Creates an active repository  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.cluster_self_repository_active_id_post_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: The repository ID. (required)
    :param CreateActiveRepositoryRequest body:
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    :raises ValueError: if ``id`` is None.
    """
    # Keyword arguments accepted in addition to the positional parameter.
    accepted = ('id', 'body', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    params = {'id': id}
    # Plain dict iteration replaces the former six.iteritems py2 shim;
    # unknown keywords are rejected before any work is done.
    for key, val in kwargs.items():
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method cluster_self_repository_active_id_post" % key
            )
        params[key] = val
    # verify the required parameter 'id' is set
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `cluster_self_repository_active_id_post`")  # noqa: E501

    path_params = {'id': params['id']}
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/cluster/self/repository/active/{id}', 'POST',
        path_params,
        [],  # query params
        header_params,
        body=params.get('body'),  # optional CreateActiveRepositoryRequest payload
        post_params=[],  # form params
        files={},  # no file uploads
        response_type=None,  # noqa: E501
        auth_settings=[],  # no authentication configured
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def cluster_self_repository_active_id_resume_post(self, id, **kwargs):  # noqa: E501
    """Resumes the paused repository  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.cluster_self_repository_active_id_resume_post(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: The repository ID. (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps the payload from the full
    # (data, status, headers) response of the *_with_http_info variant.
    kwargs['_return_http_data_only'] = True
    delegate = self.cluster_self_repository_active_id_resume_post_with_http_info
    # Async callers get the request thread back, sync callers the data --
    # in both cases the delegate's return value is forwarded unchanged.
    return delegate(id, **kwargs)
def cluster_self_repository_active_id_resume_post_with_http_info(self, id, **kwargs):  # noqa: E501
    """Resumes the paused repository  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.cluster_self_repository_active_id_resume_post_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: The repository ID. (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    :raises ValueError: if ``id`` is None.
    """
    # Keyword arguments accepted in addition to the positional parameter.
    accepted = ('id', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    params = {'id': id}
    # Plain dict iteration replaces the former six.iteritems py2 shim;
    # unknown keywords are rejected before any work is done.
    for key, val in kwargs.items():
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method cluster_self_repository_active_id_resume_post" % key
            )
        params[key] = val
    # verify the required parameter 'id' is set
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `cluster_self_repository_active_id_resume_post`")  # noqa: E501

    path_params = {'id': params['id']}
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/cluster/self/repository/active/{id}/resume', 'POST',
        path_params,
        [],  # query params
        header_params,
        body=None,
        post_params=[],  # form params
        files={},  # no file uploads
        response_type=None,  # noqa: E501
        auth_settings=[],  # no authentication configured
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
# Insert Location Types and Group Roles into DB
from django.db import migrations, models
class Migration(migrations.Migration):
# Migration script to insert Datacite 4.2 metadata schema
dependencies = [
('api', '0020_exportrequest'),
]
operations = [
# Increase the size of the label
migrations.AlterField('Field', 'label', models.CharField(max_length=255)),
# Increase the size of the help
migrations.AlterField('Field', 'help', models.CharField(max_length=255)),
## ISO 19115
# Metadata Schema
migrations.RunSQL("INSERT INTO rdm_schemas (id, label) VALUES ('b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', 'en.metadata.iso.19115.2014');"),
# Metadata Fields
# 1 MD Metadata
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
null, \
'82a940a8-9152-47bd-95f6-3cfbc2ce9901', \
'en.metadata.iso.19115.gmd.MD_Metadata', \
'en.metadata.iso.19115.gmd.MD_Metadata.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
17);"),
# 1.1 File Identifier
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'82a940a8-9152-47bd-95f6-3cfbc2ce9901', \
'dbde189b-08fa-4443-9e84-0e334864a9d6', \
'en.metadata.iso.19115.gmd.fileIdentifier', \
'en.metadata.iso.19115.gmd.fileIdentifier.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
1);"),
# 1.1.1 File Identifier -> CharacterString
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'dbde189b-08fa-4443-9e84-0e334864a9d6', \
'd75b93b0-5922-431d-812b-300ce8193f46', \
'en.metadata.iso.19115.gmd.fileIdentifier.CharacterString', \
'en.metadata.iso.19115.gmd.fileIdentifier.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.2 Language
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'82a940a8-9152-47bd-95f6-3cfbc2ce9901', \
'0fa1f953-2bd4-4470-be72-7e2793269e0f', \
'en.metadata.iso.19115.gmd.language', \
'en.metadata.iso.19115.gmd.language.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
2);"),
# 1.2.1 Language -> CharacterString
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'0fa1f953-2bd4-4470-be72-7e2793269e0f', \
'6980e58f-9bcc-4f89-adfe-f0c2b83479c4', \
'en.metadata.iso.19115.gmd.language.CharacterString', \
'en.metadata.iso.19115.gmd.language.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.3 Characterset
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'82a940a8-9152-47bd-95f6-3cfbc2ce9901', \
'1059c1f3-09be-4d68-9c6c-fd1c7e08073c', \
'en.metadata.iso.19115.gmd.characterSet', \
'en.metadata.iso.19115.gmd.characterSet.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
3);"),
# 1.3.1 Characterset -> CharacterSetCode
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'1059c1f3-09be-4d68-9c6c-fd1c7e08073c', \
'92c9916d-84bb-4142-b7d8-72963d0a7bae', \
'en.metadata.iso.19115.gmd.MD_CharacterSetCode', \
'en.metadata.iso.19115.gmd.MD_CharacterSetCode.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.3.1.1 Characterset -> CharacterSetCode -> codeList
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'92c9916d-84bb-4142-b7d8-72963d0a7bae', \
'901fd6ac-6b6f-43fb-a206-da1a91f533cc', \
'en.metadata.iso.19115.gmd.MD_CharacterSetCode.CharacterSetCode.codeList', \
'en.metadata.iso.19115.gmd.MD_CharacterSetCode.CharacterSetCode.codeList.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.3.1.2 Characterset -> CharacterSetCode -> codeListValue
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'92c9916d-84bb-4142-b7d8-72963d0a7bae', \
'fa951597-c0e3-44c5-bc5d-e1a9982bf1e5', \
'en.metadata.iso.19115.gmd.MD_CharacterSetCode.CharacterSetCode.codeListValue', \
'en.metadata.iso.19115.gmd.MD_CharacterSetCode.CharacterSetCode.codeListValue.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
2);"),
# 1.3.1.3 Characterset -> CharacterSetCode -> codeSpace
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'92c9916d-84bb-4142-b7d8-72963d0a7bae', \
'65586f6f-4501-4afb-8f5c-b53044b293c3', \
'en.metadata.iso.19115.gmd.MD_CharacterSetCode.CharacterSetCode.codeSpace', \
'en.metadata.iso.19115.gmd.MD_CharacterSetCode.CharacterSetCode.codeSpace.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
3);"),
# 1.4 hierarchyLevel
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'82a940a8-9152-47bd-95f6-3cfbc2ce9901', \
'fab1f655-2546-4850-aa39-fd502854c29b', \
'en.metadata.iso.19115.gmd.hierarchyLevel', \
'en.metadata.iso.19115.gmd.hierarchyLevel.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
4);"),
# 1.4.1 hierarchyLevel -> MD_ScopeCode
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'fab1f655-2546-4850-aa39-fd502854c29b', \
'660bea3d-9c67-426a-94af-3e9eef2ac3ea', \
'en.metadata.iso.19115.gmd.hierarchyLevel.MD_ScopeCode', \
'en.metadata.iso.19115.gmd.hierarchyLevel.MD_ScopeCode.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.4.1.1 hierarchyLevel -> MD_ScopeCode -> codeList
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'660bea3d-9c67-426a-94af-3e9eef2ac3ea', \
'7dee6721-65e3-4451-968c-66668203fedd', \
'en.metadata.iso.19115.gmd.hierarchyLevel.MD_ScopeCode.codeList', \
'en.metadata.iso.19115.gmd.hierarchyLevel.MD_ScopeCode.codeList.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.4.1.2 hierarchyLevel -> MD_ScopeCode -> codeListValue
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'660bea3d-9c67-426a-94af-3e9eef2ac3ea', \
'8324d29d-3657-4a89-b05e-a27c7e42b88f', \
'en.metadata.iso.19115.gmd.hierarchyLevel.MD_ScopeCode.codeListValue', \
'en.metadata.iso.19115.gmd.hierarchyLevel.MD_ScopeCode.codeListValue.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
2);"),
# 1.4.1.3 hierarchyLevel -> MD_ScopeCode -> codeSpace
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'660bea3d-9c67-426a-94af-3e9eef2ac3ea', \
'1dc0d316-67f9-4af1-9dc3-65d03ca9830a', \
'en.metadata.iso.19115.gmd.hierarchyLevel.MD_ScopeCode.codeSpace', \
'en.metadata.iso.19115.gmd.hierarchyLevel.MD_ScopeCode.codeSpace.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
3);"),
# 1.5 contact
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'82a940a8-9152-47bd-95f6-3cfbc2ce9901', \
'cbeb3e95-ad5e-42bc-b6bc-33daf2c807ea', \
'en.metadata.iso.19115.gmd.contact', \
'en.metadata.iso.19115.gmd.contact.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
True, \
null, \
5);"),
# 1.5.1 contact -> CI_ResponsibleParty
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'cbeb3e95-ad5e-42bc-b6bc-33daf2c807ea', \
'7620fe8f-4f03-46e3-b076-d4315da4b0f2', \
'en.metadata.iso.19115.gmd.CI_ResponsibleParty', \
'en.metadata.iso.19115.gmd.CI_ResponsibleParty.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
1);"),
# 1.5.1.1 contact -> CI_ResponsibleParty -> individualName
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'7620fe8f-4f03-46e3-b076-d4315da4b0f2', \
'95e77b7c-75d3-4d76-8701-9e07a80498d9', \
'en.metadata.iso.19115.gmd.individualName', \
'en.metadata.iso.19115.gmd.individualName.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
1);"),
# 1.5.1.1.1 contact -> CI_ResponsibleParty -> individualName -> CharacterString
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'95e77b7c-75d3-4d76-8701-9e07a80498d9', \
'7a1eae98-94d9-4c8e-b2f1-faa455caad43', \
'en.metadata.iso.19115.gmd.individualName.CharacterString', \
'en.metadata.iso.19115.gmd.individualName.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.5.1.2 contact -> CI_ResponsibleParty -> organisationName
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'7620fe8f-4f03-46e3-b076-d4315da4b0f2', \
'970328f2-b13c-498e-b209-137994b5b04f', \
'en.metadata.iso.19115.gmd.organisationName', \
'en.metadata.iso.19115.gmd.organisationName.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
2);"),
# 1.5.1.2.1 contact -> CI_ResponsibleParty -> organisationName -> CharacterString
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'970328f2-b13c-498e-b209-137994b5b04f', \
'af20b968-e213-43d5-a8f9-33f7f571ffad', \
'en.metadata.iso.19115.gmd.organisationName.CharacterString', \
'en.metadata.iso.19115.gmd.organisationName.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.5.1.3 contact -> CI_ResponsibleParty -> contactInfo
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'7620fe8f-4f03-46e3-b076-d4315da4b0f2', \
'25a801b9-4507-4b4a-b6c5-8c4d8fbba2e1', \
'en.metadata.iso.19115.gmd.contactInfo', \
'en.metadata.iso.19115.gmd.contactInfo.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
3);"),
# 1.5.1.3.1 contact -> CI_ResponsibleParty -> contactInfo -> CI_Contact
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'25a801b9-4507-4b4a-b6c5-8c4d8fbba2e1', \
'58a1c85d-5ea6-4f93-8c4d-0432f476ec96', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
1);"),
# 1.5.1.3.1.1 contact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> phone
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'58a1c85d-5ea6-4f93-8c4d-0432f476ec96', \
'71c07e63-599d-40c8-9db8-61553523b998', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.phone', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.phone.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
1);"),
# 1.5.1.3.1.1.1 contact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> phone -> CI_Telephone
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'71c07e63-599d-40c8-9db8-61553523b998', \
'a493b7cb-9d46-4fd5-8069-18951b7a5571', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.phone.CI_Telephone', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.phone.CI_Telephone.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
1);"),
# 1.5.1.3.1.1.1.1 contact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> phone -> CI_Telephone -> voice
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'a493b7cb-9d46-4fd5-8069-18951b7a5571', \
'6417ca65-2be2-49c8-9af1-3f5059752738', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.phone.CI_Telephone.voice', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.phone.CI_Telephone.voice.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
1);"),
# 1.5.1.3.1.1.1.1.1 contact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> phone -> CI_Telephone -> voice -> CharacterString
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'6417ca65-2be2-49c8-9af1-3f5059752738', \
'6a8537d7-87dc-45b4-9bed-91727ce2ccf7', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.phone.CI_Telephone.voice.CharacterString', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.phone.CI_Telephone.voice.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.5.1.3.1.2 contact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> address
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'58a1c85d-5ea6-4f93-8c4d-0432f476ec96', \
'687adcef-511e-463a-83ec-7102a8be58f6', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.address', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.address.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
2);"),
# 1.5.1.3.1.2.1 contact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> address -> CI_Address
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'687adcef-511e-463a-83ec-7102a8be58f6', \
'638f46f7-0081-44d4-aacb-2efe36243c4c', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.address.CI_Address', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.address.CI_Address.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
1);"),
# 1.5.1.3.1.2.1.1 contact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> address -> CI_Address -> deliveryPoint
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'638f46f7-0081-44d4-aacb-2efe36243c4c', \
'0da9ee62-5dd7-45a3-8383-a9e1049c883a', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.address.CI_Address.deliveryPoint', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.address.CI_Address.deliveryPoint.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
1);"),
# 1.5.1.3.1.2.1.1.1 contact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> address -> CI_Address -> deliveryPoint -> CharacterString
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'0da9ee62-5dd7-45a3-8383-a9e1049c883a', \
'f931f4e2-d46f-43cc-a6ba-3fca22cb3fee', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.address.CI_Address.deliveryPoint.CharacterString', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.address.CI_Address.deliveryPoint.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.5.1.3.1.2.1.2 contact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> address -> CI_Address -> city
# NOTE(review): parent_id corrected from '0da9ee62-…' (the deliveryPoint node) to the
# CI_Address node '638f46f7-…'. The hierarchy comment above and the sibling
# default_order sequence (deliveryPoint=1, city=2, … electronicMailAddress=6) both
# show city is a direct child of CI_Address, not of deliveryPoint.
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'638f46f7-0081-44d4-aacb-2efe36243c4c', \
'f0ccaebe-49ce-44c6-8883-93119ed71f7d', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.address.CI_Address.city', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.address.CI_Address.city.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
2);"),
# 1.5.1.3.1.2.1.2.1 contact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> address -> CI_Address -> city -> CharacterString
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'f0ccaebe-49ce-44c6-8883-93119ed71f7d', \
'199e3821-303a-429e-a67e-bdeb387eaf39', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.address.CI_Address.city.CharacterString', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.address.CI_Address.city.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.5.1.3.1.2.1.3 contact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> address -> CI_Address -> administrativeArea
# NOTE(review): parent_id corrected from '0da9ee62-…' (deliveryPoint) to the
# CI_Address node '638f46f7-…' — administrativeArea (1.5.1.3.1.2.1.3) is a sibling
# of deliveryPoint under CI_Address, as the numbering and default_order=3 indicate.
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'638f46f7-0081-44d4-aacb-2efe36243c4c', \
'9f9e96fa-f53a-44c6-b77c-f60c0c0ded24', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.address.CI_Address.administrativeArea', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.address.CI_Address.administrativeArea.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
3);"),
# 1.5.1.3.1.2.1.3.1 contact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> address -> CI_Address -> administrativeArea -> CharacterString
# NOTE(review): default_order corrected from 3 to 1. Every other sole CharacterString
# leaf in this subtree (deliveryPoint, city, postalCode, country) uses 1; the 3 here
# looks copied from the parent administrativeArea row — confirm against the UI ordering.
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'9f9e96fa-f53a-44c6-b77c-f60c0c0ded24', \
'0f50b620-9fc3-464f-95c7-c684c5d63857', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.address.CI_Address.administrativeArea.CharacterString', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.address.CI_Address.administrativeArea.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.5.1.3.1.2.1.4 contact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> address -> CI_Address -> postalCode
# NOTE(review): parent_id corrected from '0da9ee62-…' (deliveryPoint) to the
# CI_Address node '638f46f7-…' — postalCode (1.5.1.3.1.2.1.4) is a sibling of
# deliveryPoint under CI_Address, as the numbering and default_order=4 indicate.
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'638f46f7-0081-44d4-aacb-2efe36243c4c', \
'7886ceec-63f9-4c2d-8014-97ec9d153cad', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.address.CI_Address.postalCode', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.address.CI_Address.postalCode.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
4);"),
# 1.5.1.3.1.2.1.4.1 contact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> address -> CI_Address -> postalCode -> CharacterString
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'7886ceec-63f9-4c2d-8014-97ec9d153cad', \
'ee7290ee-c1b2-4ddd-9000-f390af223491', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.address.CI_Address.postalCode.CharacterString', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.address.CI_Address.postalCode.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.5.1.3.1.2.1.5 contact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> address -> CI_Address -> country
# NOTE(review): parent_id corrected from '0da9ee62-…' (deliveryPoint) to the
# CI_Address node '638f46f7-…' — country (1.5.1.3.1.2.1.5) is a sibling of
# deliveryPoint under CI_Address, as the numbering and default_order=5 indicate.
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'638f46f7-0081-44d4-aacb-2efe36243c4c', \
'dfc50a9e-4e42-445c-a0b8-1fcbc5d29a79', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.address.CI_Address.country', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.address.CI_Address.country.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
5);"),
# 1.5.1.3.1.2.1.5.1 contact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> address -> CI_Address -> country -> CharacterString
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'dfc50a9e-4e42-445c-a0b8-1fcbc5d29a79', \
'25bb3293-d538-4149-adc2-177124aad88d', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.address.CI_Address.country.CharacterString', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.address.CI_Address.country.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.5.1.3.1.2.1.6 contact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> address -> CI_Address -> electronicMailAddress
# NOTE(review): parent_id corrected from '0da9ee62-…' (deliveryPoint) to the
# CI_Address node '638f46f7-…' — electronicMailAddress (1.5.1.3.1.2.1.6) is a
# sibling of deliveryPoint under CI_Address, as the numbering and default_order=6 indicate.
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'638f46f7-0081-44d4-aacb-2efe36243c4c', \
'f7c56dd0-bf94-4dca-b4b5-3b6bdc69dece', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.address.CI_Address.electronicMailAddress', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.address.CI_Address.electronicMailAddress.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
6);"),
# 1.5.1.3.1.2.1.6.1 contact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> address -> CI_Address -> electronicMailAddress -> CharacterString
# NOTE(review): metadata_ui_type_id / metadata_value_type_id corrected to the
# leaf CharacterString type ids ('9a8b45b0-…' / 'e079c97d-…') used by every other
# .CharacterString row in this file; the original row reused the container type ids
# ('e2c6f819-…' / '03bf8631-…'), apparently a copy-paste slip — confirm against the
# rdm_fields type tables.
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'f7c56dd0-bf94-4dca-b4b5-3b6bdc69dece', \
'94bb3efd-8b4e-4b12-ac62-4d6455c353b8', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.address.CI_Address.electronicMailAddress.CharacterString', \
'en.metadata.iso.19115.gmd.contactInfo.CI_Contact.address.CI_Address.electronicMailAddress.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.5.1.4 contact -> CI_ResponsibleParty -> role
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'7620fe8f-4f03-46e3-b076-d4315da4b0f2', \
'92aadbc6-f3f9-4d4a-b676-af00cd1c8202', \
'en.metadata.iso.19115.gmd.role', \
'en.metadata.iso.19115.gmd.role.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
4);"),
# 1.5.1.4.1 contact -> CI_ResponsibleParty -> role -> CI_RoleCode
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'92aadbc6-f3f9-4d4a-b676-af00cd1c8202', \
'41f5031a-d58c-4c86-9a05-e96feae60b85', \
'en.metadata.iso.19115.gmd.role.CI_RoleCode', \
'en.metadata.iso.19115.gmd.role.CI_RoleCode.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.5.1.4.1.1 contact -> CI_ResponsibleParty -> role -> CI_RoleCode -> codeList
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'41f5031a-d58c-4c86-9a05-e96feae60b85', \
'7c749c85-c33e-4dd3-813b-f072ca494658', \
'en.metadata.iso.19115.gmd.role.CI_RoleCode.codeList', \
'en.metadata.iso.19115.gmd.role.CI_RoleCode.codeList.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.5.1.4.1.2 contact -> CI_ResponsibleParty -> role -> CI_RoleCode -> codeListValue
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'41f5031a-d58c-4c86-9a05-e96feae60b85', \
'ea987049-ef65-43e6-a6f2-4ed6d83cfa9c', \
'en.metadata.iso.19115.gmd.role.CI_RoleCode.codeListValue', \
'en.metadata.iso.19115.gmd.role.CI_RoleCode.codeListValue.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
2);"),
# 1.5.1.4.1.3 contact -> CI_ResponsibleParty -> role -> CI_RoleCode -> codeSpace
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'41f5031a-d58c-4c86-9a05-e96feae60b85', \
'277e6745-152b-42f0-afb3-18945a403114', \
'en.metadata.iso.19115.gmd.role.CI_RoleCode.codeSpace', \
'en.metadata.iso.19115.gmd.role.CI_RoleCode.codeSpace.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
3);"),
# 1.6 dateStamp
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'82a940a8-9152-47bd-95f6-3cfbc2ce9901', \
'54f2d59d-748c-4305-ae25-e8568dd732b2', \
'en.metadata.iso.19115.gmd.dateStamp', \
'en.metadata.iso.19115.gmd.dateStamp.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
True, \
null, \
6);"),
# 1.6.1 dateStamp -> Date
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'54f2d59d-748c-4305-ae25-e8568dd732b2', \
'fb1bb099-c1b7-47e9-b526-516fccade8fe', \
'en.metadata.iso.19115.gmd.dateStamp.Date', \
'en.metadata.iso.19115.gmd.dateStamp.Date.help', \
'00a8ffb6-0124-4ec1-846e-e27fd3c7ae03', \
'b2e7ecd5-2fe8-4c8b-b063-624e2162f262', \
False, \
null, \
1);"),
# 1.7 metadataStandardName
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'82a940a8-9152-47bd-95f6-3cfbc2ce9901', \
'db5cf0d0-11b3-44d3-b67d-07e91cc58c8f', \
'en.metadata.iso.19115.gmd.metadataStandardName', \
'en.metadata.iso.19115.gmd.metadataStandardName.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
True, \
null, \
7);"),
# 1.7.1 metadataStandardName -> CharacterString
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'db5cf0d0-11b3-44d3-b67d-07e91cc58c8f', \
'e73966b4-593a-4a4a-8bd8-152fc047f45d', \
'en.metadata.iso.19115.gmd.metadataStandardName.CharacterString', \
'en.metadata.iso.19115.gmd.metadataStandardName.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.8 metadataStandardVersion
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'82a940a8-9152-47bd-95f6-3cfbc2ce9901', \
'71e6a92d-84a4-4eed-b81c-52fd62216619', \
'en.metadata.iso.19115.gmd.metadataStandardVersion', \
'en.metadata.iso.19115.gmd.metadataStandardVersion.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
True, \
null, \
8);"),
# 1.8.1 metadataStandardVersion -> CharacterString
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'71e6a92d-84a4-4eed-b81c-52fd62216619', \
'ce8500b6-fcfe-4482-8206-1efb060a1874', \
'en.metadata.iso.19115.gmd.metadataStandardVersion.CharacterString', \
'en.metadata.iso.19115.gmd.metadataStandardVersion.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.9 dataSetURI
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'82a940a8-9152-47bd-95f6-3cfbc2ce9901', \
'46b65de9-7906-4ce0-9fb3-a4df36992a74', \
'en.metadata.iso.19115.gmd.dataSetURI', \
'en.metadata.iso.19115.gmd.dataSetURI.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
True, \
null, \
9);"),
# 1.9.1 dataSetURI -> CharacterString
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'46b65de9-7906-4ce0-9fb3-a4df36992a74', \
'baacf7a3-47c0-4cc1-9b3d-088a3b4b82d1', \
'en.metadata.iso.19115.gmd.dataSetURI.CharacterString', \
'en.metadata.iso.19115.gmd.dataSetURI.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10 identificationInfo
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'82a940a8-9152-47bd-95f6-3cfbc2ce9901', \
'78774c4c-250e-4d1f-b406-6be263022ad7', \
'en.metadata.iso.19115.gmd.identificationInfo', \
'en.metadata.iso.19115.gmd.identificationInfo.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
10);"),
# 1.10.1 identificationInfo -> MD_DataIdentification
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'78774c4c-250e-4d1f-b406-6be263022ad7', \
'28f1d340-c5b1-4be0-89ef-fe1fb327db86', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
1);"),
# 1.10.1.1 identificationInfo -> MD_DataIdentification -> citation
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'28f1d340-c5b1-4be0-89ef-fe1fb327db86', \
'aaf1d959-0f49-40f1-ad19-3e13076587be', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.citation', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.citation.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
1);"),
# 1.10.1.1.1 identificationInfo -> MD_DataIdentification -> citation -> CharacterString
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'aaf1d959-0f49-40f1-ad19-3e13076587be', \
'671c5b6e-e740-489f-a2cb-7b6628c43fab', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.citation.CharacterString', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.citation.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
True, \
null, \
1);"),
# 1.10.1.2 identificationInfo -> MD_DataIdentification -> abstract
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'28f1d340-c5b1-4be0-89ef-fe1fb327db86', \
'f6e04925-83a2-4b7d-bf72-e00a248866dd', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.abstract', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.abstract.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
2);"),
# 1.10.1.2.1 identificationInfo -> MD_DataIdentification -> abstract -> CharacterString
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'f6e04925-83a2-4b7d-bf72-e00a248866dd', \
'ea4f5ef3-4b0e-4c63-941d-eee2ee4f7eb0', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.abstract.CharacterString', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.abstract.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.3 identificationInfo -> MD_DataIdentification -> purpose
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'28f1d340-c5b1-4be0-89ef-fe1fb327db86', \
'db55f75a-771a-4c00-ab68-e239d28ee3d5', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.purpose', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.purpose.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
True, \
null, \
3);"),
# 1.10.1.3.1 identificationInfo -> MD_DataIdentification -> purpose -> CharacterString
# NOTE(review): help label typo fixed — '…CharacterString,help' (comma) is now
# '…CharacterString.help', matching the dotted label.help convention used by
# every other row in this migration.
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'db55f75a-771a-4c00-ab68-e239d28ee3d5', \
'37413041-932b-4888-8b5b-94442b63fe6b', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.purpose.CharacterString', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.purpose.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.4 identificationInfo -> MD_DataIdentification -> status
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'28f1d340-c5b1-4be0-89ef-fe1fb327db86', \
'6b2a2575-41d5-4d3d-a76c-348b2e809a42', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.status', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.status.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
True, \
null, \
4);"),
# 1.10.1.4.1 identificationInfo -> MD_DataIdentification -> status -> MD_ProgressCode
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'6b2a2575-41d5-4d3d-a76c-348b2e809a42', \
'4cf5f746-6e7e-4fb1-847f-e2de10d07305', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.status.MD_ProgressCode', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.status.MD_ProgressCode.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.4.1.1 identificationInfo -> MD_DataIdentification -> status -> MD_ProgressCode -> codeList
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'4cf5f746-6e7e-4fb1-847f-e2de10d07305', \
'6f6512ad-aa3b-48fb-961c-7ab09cb5083c', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.status.MD_ProgressCode.codeList', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.status.MD_ProgressCode.codeList.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.4.1.2 identificationInfo -> MD_DataIdentification -> status -> MD_ProgressCode -> codeListValue
# NOTE(review): help label fixed — the original read '…MD_ProgressCode..help',
# dropping the 'codeListValue' segment; it now matches the row's label plus '.help',
# as in the sibling codeList and codeSpace rows.
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'4cf5f746-6e7e-4fb1-847f-e2de10d07305', \
'28358e27-f310-4292-b0bb-02e7ee7a9179', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.status.MD_ProgressCode.codeListValue', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.status.MD_ProgressCode.codeListValue.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
2);"),
# 1.10.1.4.1.3 identificationInfo -> MD_DataIdentification -> status -> MD_ProgressCode -> codeSpace
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'4cf5f746-6e7e-4fb1-847f-e2de10d07305', \
'1a426580-daad-44db-b1b8-7d32892b59c8', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.status.MD_ProgressCode.codeSpace', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.status.MD_ProgressCode.codeSpace.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
3);"),
# 1.10.1.5 identificationInfo -> MD_DataIdentification -> pointOfContact
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'28f1d340-c5b1-4be0-89ef-fe1fb327db86', \
'4965e36a-623f-4141-83f5-75f5624090b6', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
True, \
null, \
5);"),
# 1.10.1.5.1 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'4965e36a-623f-4141-83f5-75f5624090b6', \
'f1083c08-f3c7-4dd6-a31d-65f99356ff6c', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
True, \
null, \
1);"),
# 1.10.1.5.1.1 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> individualName
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'f1083c08-f3c7-4dd6-a31d-65f99356ff6c', \
'a2091851-67be-4ca5-a613-bece6a5a0694', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.individualName', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.individualName.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
1);"),
# 1.10.1.5.1.1.1 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> individualName -> CharacterString
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'a2091851-67be-4ca5-a613-bece6a5a0694', \
'5b95bdee-e3ac-4105-8115-d18f564f5a9a', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.individualName.CharacterString', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.individualName.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.5.1.2 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> organisationName
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'f1083c08-f3c7-4dd6-a31d-65f99356ff6c', \
'91614cec-9fb5-4613-a1a8-212e0083d42c', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.organisationName', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.organisationName.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
2);"),
# 1.10.1.5.1.2.1 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> organisationName -> CharacterString
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'91614cec-9fb5-4613-a1a8-212e0083d42c', \
'75bd334a-0d87-4322-82ee-804628a03a59', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.organisationName.CharacterString', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.organisationName.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.5.1.3 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> contactInfo
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'f1083c08-f3c7-4dd6-a31d-65f99356ff6c', \
'8a1dfd5e-ff29-4767-8b12-7e82bda7e446', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
3);"),
# 1.10.1.5.1.3.1 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> contactInfo -> CI_Contact
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'8a1dfd5e-ff29-4767-8b12-7e82bda7e446', \
'06e753bc-236f-4009-ac63-a93f0f69af35', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
1);"),
# 1.10.1.5.1.3.1.1 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> phone
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'06e753bc-236f-4009-ac63-a93f0f69af35', \
'cbf47a78-dfde-4dde-b43e-fd05e33885b4', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.phone', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.phone.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
1);"),
# 1.10.1.5.1.3.1.1.1 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> phone -> CI_Telephone
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'cbf47a78-dfde-4dde-b43e-fd05e33885b4', \
'406618be-85c0-4edb-9510-a4410d611a26', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.phone.CI_Telephone', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.phone.CI_Telephone.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
1);"),
# 1.10.1.5.1.3.1.1.1.1 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> phone -> CI_Telephone -> voice
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'406618be-85c0-4edb-9510-a4410d611a26', \
'882c4b64-e00b-4de4-91ee-4648fd0103e4', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.phone.CI_Telephone.voice', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.phone.CI_Telephone.voice.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
1);"),
# 1.10.1.5.1.3.1.1.1.1.1 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> phone -> CI_Telephone -> voice -> CharacterString
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'882c4b64-e00b-4de4-91ee-4648fd0103e4', \
'38fc73a2-9003-4703-b3f0-e8699cd54e80', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.phone.CI_Telephone.voice.CharacterString', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.phone.CI_Telephone.voice.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.5.1.3.1.2 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> address
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'06e753bc-236f-4009-ac63-a93f0f69af35', \
'0f295fd7-4753-4bf8-9847-72d1c3326960', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.address', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.address.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
2);"),
# 1.10.1.5.1.3.1.2.1 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> address -> CI_Address
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'0f295fd7-4753-4bf8-9847-72d1c3326960', \
'0510eb20-8e0d-4d77-a204-f36a9c336868', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.address.CI_Address', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.address.CI_Address.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
1);"),
# 1.10.1.5.1.3.1.2.1.1 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> address -> CI_Address -> deliveryPoint
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'0510eb20-8e0d-4d77-a204-f36a9c336868', \
'8dd94d36-9f3b-4e29-8018-52a7bd394eea', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.address.CI_Address.deliveryPoint', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.address.CI_Address.deliveryPoint.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
1);"),
# 1.10.1.5.1.3.1.2.1.1.1 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> address -> CI_Address -> deliveryPoint -> CharacterString
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'8dd94d36-9f3b-4e29-8018-52a7bd394eea', \
'6f8a9fe0-d86e-4436-9d7d-8277fb65717a', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.address.CI_Address.deliveryPoint.CharacterString', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.address.CI_Address.deliveryPoint.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.5.1.3.1.2.1.2 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> address -> CI_Address -> city
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'0510eb20-8e0d-4d77-a204-f36a9c336868', \
'07351123-f6cc-4654-8622-c848610fc8bb', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.address.CI_Address.city', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.address.CI_Address.city.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
2);"),
# 1.10.1.5.1.3.1.2.1.2.1 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> address -> CI_Address -> city -> CharacterString
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'07351123-f6cc-4654-8622-c848610fc8bb', \
'a2b5492e-4493-424f-8c85-deb4b3bcf274', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.address.CI_Address.city.CharacterString', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.address.CI_Address.city.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.5.1.3.1.2.1.3 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> address -> CI_Address -> administrativeArea
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'0510eb20-8e0d-4d77-a204-f36a9c336868', \
'5a10be99-f0f9-41e1-abca-81ab212d227c', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.address.CI_Address.administrativeArea', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.address.CI_Address.administrativeArea.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
3);"),
# 1.10.1.5.1.3.1.2.1.3.1 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> address -> CI_Address -> administrativeArea -> CharacterString
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'5a10be99-f0f9-41e1-abca-81ab212d227c', \
'020586b3-9517-4f6c-8e6f-eb56535a0cda', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.address.CI_Address.administrativeArea.CharacterString', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.address.CI_Address.administrativeArea.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.5.1.3.1.2.1.4 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> address -> CI_Address -> postalCode
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'0510eb20-8e0d-4d77-a204-f36a9c336868', \
'12f34b44-8246-4bfb-9e5c-cf62dc8517d6', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.address.CI_Address.postalCode', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.address.CI_Address.postalCode.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
4);"),
# 1.10.1.5.1.3.1.2.1.4.1 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> address -> CI_Address -> postalCode -> CharacterString
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'12f34b44-8246-4bfb-9e5c-cf62dc8517d6', \
'971b15c6-115e-45bd-9fae-f7ad03e44e01', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.address.CI_Address.postalCode.CharacterString', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.address.CI_Address.postalCode.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.5.1.3.1.2.1.5 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> address -> CI_Address -> country
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'0510eb20-8e0d-4d77-a204-f36a9c336868', \
'c66d71e2-4fee-4dd8-a4a9-dcfd4366072e', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.address.CI_Address.country', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.address.CI_Address.country.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
5);"),
# 1.10.1.5.1.3.1.2.1.5.1 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> address -> CI_Address -> country -> CharacterString
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'c66d71e2-4fee-4dd8-a4a9-dcfd4366072e', \
'086dcf08-4ec2-492d-a801-f0a07eb6bed3', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.address.CI_Address.country.CharacterString', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.address.CI_Address.country.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.5.1.3.1.2.1.6 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> address -> CI_Address -> electronicMailAddress
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'0510eb20-8e0d-4d77-a204-f36a9c336868', \
'a0062d3e-b974-4793-b75b-c3e2694c9f3b', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.address.CI_Address.electronicMailAddress', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.address.CI_Address.electronicMailAddress.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
6);"),
# 1.10.1.5.1.3.1.2.1.6.1 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> address -> CI_Address -> electronicMailAddress -> CharacterString
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'a0062d3e-b974-4793-b75b-c3e2694c9f3b', \
'4c95a8ff-81cd-4a94-9aeb-4dbc35ff2506', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.address.CI_Address.electronicMailAddress.CharacterString', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.address.CI_Address.electronicMailAddress.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.5.1.3.1.3 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> onlineResource
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'06e753bc-236f-4009-ac63-a93f0f69af35', \
'4e5f7363-9663-41b3-9ce4-0b8a09c16130', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.onlineResource', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.onlineResource.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
3);"),
# 1.10.1.5.1.3.1.3.1 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> onlineResource -> linkage
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'4e5f7363-9663-41b3-9ce4-0b8a09c16130', \
'6d4957ee-9b8b-4d3f-a690-054b5e0f56c8', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.onlineResource.linkage', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.onlineResource.linkage.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
1);"),
# 1.10.1.5.1.3.1.3.1.1 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> onlineResource -> linkage -> CharacterString
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'6d4957ee-9b8b-4d3f-a690-054b5e0f56c8', \
'75b4ea9b-9810-4616-940b-9683b1238a1f', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.onlineResource.linkage.CharacterString', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.onlineResource.linkage.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.5.1.3.1.3.2 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> onlineResource -> protocol
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'4e5f7363-9663-41b3-9ce4-0b8a09c16130', \
'b81edc9c-4d9e-4a55-a4ff-a03631800e08', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.onlineResource.protocol', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.onlineResource.protocol.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
2);"),
# 1.10.1.5.1.3.1.3.2.1 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> contactInfo -> CI_Contact -> onlineResource -> protocol -> CharacterString
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'b81edc9c-4d9e-4a55-a4ff-a03631800e08', \
'ded583a4-d746-42a1-950e-f572f6374af7', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.onlineResource.protocol.CharacterString', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.contactInfo.CI_Contact.onlineResource.protocol.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.5.1.4 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> role
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'f1083c08-f3c7-4dd6-a31d-65f99356ff6c', \
'681d0205-92bd-4169-9d99-73be645b6699', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.role', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.role.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
4);"),
# 1.10.1.5.1.4.1 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> role -> CI_RoleCode
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'681d0205-92bd-4169-9d99-73be645b6699', \
'fa28d542-2baf-4eaa-8c41-ddada3ea598a', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.role.CI_RoleCode', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.role.CI_RoleCode.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.5.1.4.1.1 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> role -> CI_RoleCode -> codeList
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'fa28d542-2baf-4eaa-8c41-ddada3ea598a', \
'2f0cea94-fad8-4202-aa2c-c7e405654f85', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.role.CI_RoleCode.codeList', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.role.CI_RoleCode.codeList.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.5.1.4.1.2 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> role -> CI_RoleCode -> codeListValue
# NOTE: help key corrected — was 'codeListValuehelp' (missing '.'), which broke
# the '<label>.help' translation-key convention used by every other rdm_fields row.
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'fa28d542-2baf-4eaa-8c41-ddada3ea598a', \
'7bb7872f-cac4-403f-b68f-855155018383', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.role.CI_RoleCode.codeListValue', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.role.CI_RoleCode.codeListValue.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
2);"),
# 1.10.1.5.1.4.1.3 identificationInfo -> MD_DataIdentification -> pointOfContact -> CI_ResponsibleParty -> role -> CI_RoleCode -> codeSpace
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'fa28d542-2baf-4eaa-8c41-ddada3ea598a', \
'a1c44f59-ce90-49f0-8ea5-374afa99c439', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.role.CI_RoleCode.codeSpace', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.pointOfContact.CI_ResponsibleParty.role.CI_RoleCode.codeSpace.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
3);"),
# 1.10.1.6 identificationInfo -> MD_DataIdentification -> descriptiveKeywords
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'28f1d340-c5b1-4be0-89ef-fe1fb327db86', \
'258680d8-c753-4083-9a28-30839c44adee', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.descriptiveKeywords', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.descriptiveKeywords.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
True, \
null, \
6);"),
# 1.10.1.6.1 identificationInfo -> MD_DataIdentification -> descriptiveKeywords -> MD_Keywords
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'258680d8-c753-4083-9a28-30839c44adee', \
'378ba8c7-f6a4-480e-880f-9c4c5609a64b', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.descriptiveKeywords.MD_Keywords', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.descriptiveKeywords.MD_Keywords.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
True, \
null, \
1);"),
# 1.10.1.6.1.1 identificationInfo -> MD_DataIdentification -> descriptiveKeywords -> MD_Keywords -> keyword
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'378ba8c7-f6a4-480e-880f-9c4c5609a64b', \
'6cb535e3-739b-44c9-892a-979e48c3df86', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.descriptiveKeywords.MD_Keywords.keyword', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.descriptiveKeywords.MD_Keywords.keyword.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
True, \
null, \
1);"),
# 1.10.1.6.1.1.1 identificationInfo -> MD_DataIdentification -> descriptiveKeywords -> MD_Keywords -> keyword -> CharacterString
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'6cb535e3-739b-44c9-892a-979e48c3df86', \
'8c47d4c5-bec3-43e6-bf7c-8c2b3d3bb8db', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.descriptiveKeywords.MD_Keywords.keyword.CharacterString', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.descriptiveKeywords.MD_Keywords.keyword.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.6.1.2 identificationInfo -> MD_DataIdentification -> descriptiveKeywords -> MD_Keywords -> type
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'378ba8c7-f6a4-480e-880f-9c4c5609a64b', \
'b58ca5ea-cacb-41ba-aad5-0e95dfe762cc', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.descriptiveKeywords.MD_Keywords.type', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.descriptiveKeywords.MD_Keywords.type.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
2);"),
# Seed rows for rdm_fields describing the ISO 19115 identificationInfo subtree.
# Column order in every INSERT: schema_id, parent_id, id, label, help,
# metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order.
# NOTE: the backslashes are line continuations *inside* the SQL string literal,
# so the executed SQL depends on this exact layout — do not reformat.
# 1.10.1.6.1.2.1 identificationInfo -> MD_DataIdentification -> descriptiveKeywords -> MD_Keywords -> type -> MD_KeywordTypeCode
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'b58ca5ea-cacb-41ba-aad5-0e95dfe762cc', \
'df8b51c8-cef4-42b4-bae0-dee3c5652019', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.descriptiveKeywords.MD_Keywords.type.MD_KeywordTypeCode', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.descriptiveKeywords.MD_Keywords.type.MD_KeywordTypeCode.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.6.1.2.1.1 identificationInfo -> MD_DataIdentification -> descriptiveKeywords -> MD_Keywords -> type -> MD_KeywordTypeCode -> codeList
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'df8b51c8-cef4-42b4-bae0-dee3c5652019', \
'bbafadf5-2e6e-4d74-b4c2-6a17773a6251', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.descriptiveKeywords.MD_Keywords.type.MD_KeywordTypeCode.codeList', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.descriptiveKeywords.MD_Keywords.type.MD_KeywordTypeCode.codeList.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.6.1.2.1.2 identificationInfo -> MD_DataIdentification -> descriptiveKeywords -> MD_Keywords -> type -> MD_KeywordTypeCode -> codeListValue
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'df8b51c8-cef4-42b4-bae0-dee3c5652019', \
'ae72ed43-7812-4ff2-9ffb-3e689f6790cf', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.descriptiveKeywords.MD_Keywords.type.MD_KeywordTypeCode.codeListValue', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.descriptiveKeywords.MD_Keywords.type.MD_KeywordTypeCode.codeListValue.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
2);"),
# 1.10.1.6.1.2.1.3 identificationInfo -> MD_DataIdentification -> descriptiveKeywords -> MD_Keywords -> type -> MD_KeywordTypeCode -> codeSpace
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'df8b51c8-cef4-42b4-bae0-dee3c5652019', \
'a510a2a9-e85c-4ba0-a3e9-991120f6f37e', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.descriptiveKeywords.MD_Keywords.type.MD_KeywordTypeCode.codeSpace', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.descriptiveKeywords.MD_Keywords.type.MD_KeywordTypeCode.codeSpace.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
3);"),
# 1.10.1.6.1.3 identificationInfo -> MD_DataIdentification -> descriptiveKeywords -> MD_Keywords -> thesaurusName
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'378ba8c7-f6a4-480e-880f-9c4c5609a64b', \
'6a1d1b75-fb64-494c-99d6-ed569929cc2f', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.descriptiveKeywords.MD_Keywords.thesaurusName', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.descriptiveKeywords.MD_Keywords.thesaurusName.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
3);"),
# 1.10.1.7 identificationInfo -> MD_DataIdentification -> resourceConstraints
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'28f1d340-c5b1-4be0-89ef-fe1fb327db86', \
'c479021a-d64d-419c-b3d2-d0c8b5dc9458', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.resourceConstraints', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.resourceConstraints.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
True, \
null, \
7);"),
# 1.10.1.7.1 identificationInfo -> MD_DataIdentification -> resourceConstraints -> MD_LegalConstraints
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'c479021a-d64d-419c-b3d2-d0c8b5dc9458', \
'586a202d-9e97-4215-8efc-f7b9356d90da', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.resourceConstraints.MD_LegalConstraints', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.resourceConstraints.MD_LegalConstraints.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
1);"),
# 1.10.1.7.1.1 identificationInfo -> MD_DataIdentification -> resourceConstraints -> MD_LegalConstraints -> accessConstraints
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'586a202d-9e97-4215-8efc-f7b9356d90da', \
'5942b0e2-04d9-495c-bc0a-f7cbf95c93c2', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.resourceConstraints.MD_LegalConstraints.accessConstraints', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.resourceConstraints.MD_LegalConstraints.accessConstraints.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
1);"),
# 1.10.1.7.1.1.1 identificationInfo -> MD_DataIdentification -> resourceConstraints -> MD_LegalConstraints -> accessConstraints -> MD_RestrictionCode
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'5942b0e2-04d9-495c-bc0a-f7cbf95c93c2', \
'2626accb-5a6c-4c5e-9638-3dd577d8bb11', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.resourceConstraints.MD_LegalConstraints.accessConstraints.MD_RestrictionCode', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.resourceConstraints.MD_LegalConstraints.accessConstraints.MD_RestrictionCode.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.7.1.1.1.1 identificationInfo -> MD_DataIdentification -> resourceConstraints -> MD_LegalConstraints -> accessConstraints -> MD_RestrictionCode -> codeList
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'2626accb-5a6c-4c5e-9638-3dd577d8bb11', \
'4ec2731c-8908-4318-a1df-e87dea49a1f1', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.resourceConstraints.MD_LegalConstraints.accessConstraints.MD_RestrictionCode.codeList', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.resourceConstraints.MD_LegalConstraints.accessConstraints.MD_RestrictionCode.codeList.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.7.1.1.1.2 identificationInfo -> MD_DataIdentification -> resourceConstraints -> MD_LegalConstraints -> accessConstraints -> MD_RestrictionCode -> codeListValue
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'2626accb-5a6c-4c5e-9638-3dd577d8bb11', \
'3e5cc97d-483e-494f-9a53-7a9d5420aa56', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.resourceConstraints.MD_LegalConstraints.accessConstraints.MD_RestrictionCode.codeListValue', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.resourceConstraints.MD_LegalConstraints.accessConstraints.MD_RestrictionCode.codeListValue.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
2);"),
# 1.10.1.7.1.1.1.3 identificationInfo -> MD_DataIdentification -> resourceConstraints -> MD_LegalConstraints -> accessConstraints -> MD_RestrictionCode -> codeSpace
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'2626accb-5a6c-4c5e-9638-3dd577d8bb11', \
'3c3d2e33-9f5f-4e8e-b62d-4db242d7719c', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.resourceConstraints.MD_LegalConstraints.accessConstraints.MD_RestrictionCode.codeSpace', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.resourceConstraints.MD_LegalConstraints.accessConstraints.MD_RestrictionCode.codeSpace.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
3);"),
# 1.10.1.7.1.2 identificationInfo -> MD_DataIdentification -> resourceConstraints -> MD_LegalConstraints -> useConstraints
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'586a202d-9e97-4215-8efc-f7b9356d90da', \
'78729779-d7b2-4377-99dd-1ddc88701678', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.resourceConstraints.MD_LegalConstraints.useConstraints', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.resourceConstraints.MD_LegalConstraints.useConstraints.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
2);"),
# 1.10.1.7.1.2.1 identificationInfo -> MD_DataIdentification -> resourceConstraints -> MD_LegalConstraints -> useConstraints -> MD_RestrictionCode
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'78729779-d7b2-4377-99dd-1ddc88701678', \
'91270f06-9305-4670-ba56-d24e1dd382fd', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.resourceConstraints.MD_LegalConstraints.useConstraints.MD_RestrictionCode', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.resourceConstraints.MD_LegalConstraints.useConstraints.MD_RestrictionCode.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.7.1.2.1.1 identificationInfo -> MD_DataIdentification -> resourceConstraints -> MD_LegalConstraints -> useConstraints -> MD_RestrictionCode -> codeList
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'91270f06-9305-4670-ba56-d24e1dd382fd', \
'87a303fb-5eff-4046-81ed-457c7c1b5bff', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.resourceConstraints.MD_LegalConstraints.useConstraints.MD_RestrictionCode.codeList', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.resourceConstraints.MD_LegalConstraints.useConstraints.MD_RestrictionCode.codeList.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.7.1.2.1.2 identificationInfo -> MD_DataIdentification -> resourceConstraints -> MD_LegalConstraints -> useConstraints -> MD_RestrictionCode -> codeListValue
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'91270f06-9305-4670-ba56-d24e1dd382fd', \
'0d0e04d8-9d5f-4f97-8c89-823f106f8994', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.resourceConstraints.MD_LegalConstraints.useConstraints.MD_RestrictionCode.codeListValue', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.resourceConstraints.MD_LegalConstraints.useConstraints.MD_RestrictionCode.codeListValue.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
2);"),
# 1.10.1.7.1.2.1.3 identificationInfo -> MD_DataIdentification -> resourceConstraints -> MD_LegalConstraints -> useConstraints -> MD_RestrictionCode -> codeSpace
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'91270f06-9305-4670-ba56-d24e1dd382fd', \
'f03572ff-ea71-4651-91e7-5552773cb0e1', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.resourceConstraints.MD_LegalConstraints.useConstraints.MD_RestrictionCode.codeSpace', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.resourceConstraints.MD_LegalConstraints.useConstraints.MD_RestrictionCode.codeSpace.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
3);"),
# 1.10.1.7.1.3 identificationInfo -> MD_DataIdentification -> resourceConstraints -> MD_LegalConstraints -> otherConstraints
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'586a202d-9e97-4215-8efc-f7b9356d90da', \
'152296d2-cdda-477d-bb5a-10b89d69bb21', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.resourceConstraints.MD_LegalConstraints.otherConstraints', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.resourceConstraints.MD_LegalConstraints.otherConstraints.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
3);"),
# 1.10.1.7.1.3.1 identificationInfo -> MD_DataIdentification -> resourceConstraints -> MD_LegalConstraints -> otherConstraints -> CharacterString
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'152296d2-cdda-477d-bb5a-10b89d69bb21', \
'60927576-7113-4037-8746-3ba8e81b71e1', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.resourceConstraints.MD_LegalConstraints.otherConstraints.CharacterString', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.resourceConstraints.MD_LegalConstraints.otherConstraints.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.8 identificationInfo -> MD_DataIdentification -> language
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'28f1d340-c5b1-4be0-89ef-fe1fb327db86', \
'a5001044-a34c-4458-9ce3-2055078ce68d', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.language', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.language.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
8);"),
# 1.10.1.8.1 identificationInfo -> MD_DataIdentification -> language -> CharacterString
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'a5001044-a34c-4458-9ce3-2055078ce68d', \
'74dd1e36-d54d-4740-b535-fe5829e03b28', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.language.CharacterString', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.language.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
True, \
null, \
1);"),
# 1.10.1.9 identificationInfo -> MD_DataIdentification -> topicCategory
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'28f1d340-c5b1-4be0-89ef-fe1fb327db86', \
'd2f4a6c0-5927-4e4e-9491-96f85635eeb7', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.topicCategory', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.topicCategory.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
9);"),
# 1.10.1.9.1 identificationInfo -> MD_DataIdentification -> topicCategory -> MD_TopicCategoryCode
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'd2f4a6c0-5927-4e4e-9491-96f85635eeb7', \
'8fd5dd16-8705-4664-8a3c-7b3fe508be47', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.topicCategory.MD_TopicCategoryCode', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.topicCategory.MD_TopicCategoryCode.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
True, \
null, \
1);"),
# 1.10.1.10 identificationInfo -> MD_DataIdentification -> extent
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'28f1d340-c5b1-4be0-89ef-fe1fb327db86', \
'e7164399-396b-4cd3-b496-31c3242406ce', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
10);"),
# 1.10.1.10.1 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'e7164399-396b-4cd3-b496-31c3242406ce', \
'67b596af-e458-4cad-8dda-7d77646e2d4c', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
True, \
null, \
1);"),
# 1.10.1.10.1.1 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> id
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'67b596af-e458-4cad-8dda-7d77646e2d4c', \
'565377d3-578f-4074-b133-79a424aceb88', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.id', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.id.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.10.1.2 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> geographicElement
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'67b596af-e458-4cad-8dda-7d77646e2d4c', \
'c0fa731c-73aa-45c7-bd06-86da2f6a283d', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
True, \
null, \
2);"),
# 1.10.1.10.1.2.1 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> geographicElement -> EX_GeographicBoundingBox
# (renumbered from 1.10.1.10.1.1: this row's parent_id is the geographicElement row, 1.10.1.10.1.2)
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'c0fa731c-73aa-45c7-bd06-86da2f6a283d', \
'd5ba5379-42d5-4ebb-9e8d-7b8f843f4135', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_GeographicBoundingBox', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_GeographicBoundingBox.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
True, \
null, \
1);"),
# 1.10.1.10.1.2.1.1 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> geographicElement -> EX_GeographicBoundingBox -> id
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'd5ba5379-42d5-4ebb-9e8d-7b8f843f4135', \
'eae5c5d3-3b66-4821-83f0-cf95ae2f9510', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_GeographicBoundingBox.id', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_GeographicBoundingBox.id.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.10.1.2.1.2 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> geographicElement -> EX_GeographicBoundingBox -> westBoundLongitude
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'd5ba5379-42d5-4ebb-9e8d-7b8f843f4135', \
'cd16d98d-51a4-448f-98e4-ec335c8be22c', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_GeographicBoundingBox.westBoundLongitude', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_GeographicBoundingBox.westBoundLongitude.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
2);"),
# 1.10.1.10.1.2.1.2.1 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> geographicElement -> EX_GeographicBoundingBox -> westBoundLongitude -> Decimal
# NOTE(review): default_order is 2 here although this Decimal appears to be the only child of
# westBoundLongitude; the sibling bound rows use 1 for their Decimal child — verify intended.
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'cd16d98d-51a4-448f-98e4-ec335c8be22c', \
'033a1642-5a5b-4150-af4f-d8791171d63f', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_GeographicBoundingBox.westBoundLongitude.Decimal', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_GeographicBoundingBox.westBoundLongitude.Decimal.help', \
'4de681a6-0462-41bf-8151-8d58e047b67e', \
'3b0f0c91-f5fd-4d5c-aaa8-40a61d822e97', \
False, \
null, \
2);"),
# 1.10.1.10.1.2.1.3 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> geographicElement -> EX_GeographicBoundingBox -> eastBoundLongitude
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'd5ba5379-42d5-4ebb-9e8d-7b8f843f4135', \
'38a69b85-5a49-4a85-a9ae-3114323ba2cb', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_GeographicBoundingBox.eastBoundLongitude', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_GeographicBoundingBox.eastBoundLongitude.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
3);"),
# 1.10.1.10.1.2.1.3.1 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> geographicElement -> EX_GeographicBoundingBox -> eastBoundLongitude -> Decimal
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'38a69b85-5a49-4a85-a9ae-3114323ba2cb', \
'99235e74-e4d7-4793-9dbb-422eafcc7e14', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_GeographicBoundingBox.eastBoundLongitude.Decimal', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_GeographicBoundingBox.eastBoundLongitude.Decimal.help', \
'4de681a6-0462-41bf-8151-8d58e047b67e', \
'3b0f0c91-f5fd-4d5c-aaa8-40a61d822e97', \
False, \
null, \
1);"),
# 1.10.1.10.1.2.1.4 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> geographicElement -> EX_GeographicBoundingBox -> southBoundLatitude
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'd5ba5379-42d5-4ebb-9e8d-7b8f843f4135', \
'62c42b4b-6165-465c-83c3-d7393c090110', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_GeographicBoundingBox.southBoundLatitude', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_GeographicBoundingBox.southBoundLatitude.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
4);"),
# 1.10.1.10.1.2.1.4.1 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> geographicElement -> EX_GeographicBoundingBox -> southBoundLatitude -> Decimal
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'62c42b4b-6165-465c-83c3-d7393c090110', \
'078b6022-0695-429c-80f6-bc03ec11f90e', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_GeographicBoundingBox.southBoundLatitude.Decimal', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_GeographicBoundingBox.southBoundLatitude.Decimal.help', \
'4de681a6-0462-41bf-8151-8d58e047b67e', \
'3b0f0c91-f5fd-4d5c-aaa8-40a61d822e97', \
False, \
null, \
1);"),
# 1.10.1.10.1.2.1.5 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> geographicElement -> EX_GeographicBoundingBox -> northBoundLatitude
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'd5ba5379-42d5-4ebb-9e8d-7b8f843f4135', \
'647811d6-f323-4e81-ba89-45d6fd707d32', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_GeographicBoundingBox.northBoundLatitude', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_GeographicBoundingBox.northBoundLatitude.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
5);"),
# 1.10.1.10.1.2.1.5.1 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> geographicElement -> EX_GeographicBoundingBox -> northBoundLatitude -> Decimal
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'647811d6-f323-4e81-ba89-45d6fd707d32', \
'9e20a8a6-4507-4ec7-b6df-2648eb5a9ca1', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_GeographicBoundingBox.northBoundLatitude.Decimal', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_GeographicBoundingBox.northBoundLatitude.Decimal.help', \
'4de681a6-0462-41bf-8151-8d58e047b67e', \
'3b0f0c91-f5fd-4d5c-aaa8-40a61d822e97', \
False, \
null, \
1);"),
# 1.10.1.10.1.2.2 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> geographicElement -> EX_BoundingPolygon
# (renumbered: the previous label 1.10.1.10.1.2 duplicated the geographicElement row's number;
# this is the second child of geographicElement, alongside EX_GeographicBoundingBox)
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'c0fa731c-73aa-45c7-bd06-86da2f6a283d', \
'77099c44-ac1a-4310-bbd5-a65b510c5388', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_BoundingPolygon', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_BoundingPolygon.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
True, \
null, \
2);"),
# 1.10.1.10.1.2.2.1 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> geographicElement -> EX_BoundingPolygon -> polygon
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'77099c44-ac1a-4310-bbd5-a65b510c5388', \
'e25f566f-1258-495e-b47c-1964b0ffd74f', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_BoundingPolygon.polygon', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_BoundingPolygon.polygon.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
True, \
null, \
1);"),
# 1.10.1.10.1.2.2.1.1 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> geographicElement -> EX_BoundingPolygon -> polygon -> Point
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'e25f566f-1258-495e-b47c-1964b0ffd74f', \
'3e6e0bdd-cd55-469a-b64b-de2e40fdda88', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_BoundingPolygon.polygon.Point', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_BoundingPolygon.polygon.Point.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
True, \
null, \
1);"),
# 1.10.1.10.1.2.2.1.1.1 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> geographicElement -> EX_BoundingPolygon -> polygon -> Point -> coordinates
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'3e6e0bdd-cd55-469a-b64b-de2e40fdda88', \
'e9619ca3-f180-4712-aa3b-2790699a29db', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_BoundingPolygon.polygon.Point.coordinates', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_BoundingPolygon.polygon.Point.coordinates.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.10.1.2.2.1.2 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> geographicElement -> EX_BoundingPolygon -> polygon -> Polygon
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'e25f566f-1258-495e-b47c-1964b0ffd74f', \
'cfdac22b-7558-4eb0-be74-377ac283a616', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_BoundingPolygon.polygon.Polygon', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_BoundingPolygon.polygon.Polygon.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
True, \
null, \
2);"),
# 1.10.1.10.1.2.2.1.2.1 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> geographicElement -> EX_BoundingPolygon -> polygon -> Polygon -> exterior
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'cfdac22b-7558-4eb0-be74-377ac283a616', \
'422f06df-32be-4cf4-9f5e-dc3edbeca991', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_BoundingPolygon.polygon.Polygon.exterior', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_BoundingPolygon.polygon.Polygon.exterior.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
1);"),
# 1.10.1.10.1.2.2.1.1 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> geographicElement -> EX_BoundingPolygon -> polygon -> Polygon -> exterior -> LinearRing
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'422f06df-32be-4cf4-9f5e-dc3edbeca991', \
'24b63491-7f0a-428e-ad11-6a4643657fb7', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_BoundingPolygon.polygon.Polygon.exterior.LinearRing', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_BoundingPolygon.polygon.Polygon.exterior.LinearRing.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
1);"),
# 1.10.1.10.1.2.2.1.1.1 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> geographicElement -> EX_BoundingPolygon -> polygon -> Polygon -> exterior -> LinearRing -> coordinates
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'24b63491-7f0a-428e-ad11-6a4643657fb7', \
'dc3c20f9-0b6e-455f-879b-e03d3f6e4086', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_BoundingPolygon.polygon.Polygon.exterior.LinearRing.coordinates', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_BoundingPolygon.polygon.Polygon.exterior.LinearRing.coordinates.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.10.1.2.2.2.1 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> geographicElement -> EX_BoundingPolygon -> polygon -> Polygon -> interior
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'cfdac22b-7558-4eb0-be74-377ac283a616', \
'4421e53e-42f4-49ee-b206-a474a51f305f', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_BoundingPolygon.polygon.Polygon.interior', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_BoundingPolygon.polygon.Polygon.interior.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
1);"),
# 1.10.1.10.1.2.2.2.1.1 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> geographicElement -> EX_BoundingPolygon -> polygon -> Polygon -> interior -> LinearRing
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'4421e53e-42f4-49ee-b206-a474a51f305f', \
'406dec1c-2812-4e66-a71e-dd763f1e7504', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_BoundingPolygon.polygon.Polygon.interior.LinearRing', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_BoundingPolygon.polygon.Polygon.interior.LinearRing.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
1);"),
# 1.10.1.10.1.2.2.2.1.1.1 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> geographicElement -> EX_BoundingPolygon -> polygon -> Polygon -> interior -> LinearRing -> coordinates
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'406dec1c-2812-4e66-a71e-dd763f1e7504', \
'3f83e053-76a9-4f7b-aac8-053335ca880e', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_BoundingPolygon.polygon.Polygon.interior.LinearRing.coordinates', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.geographicElement.EX_BoundingPolygon.polygon.Polygon.interior.LinearRing.coordinates.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.10.1.3 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> temporalElement
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'67b596af-e458-4cad-8dda-7d77646e2d4c', \
'2e14a90b-0e59-41cf-b0e4-79d638931dc6', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.temporalElement', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.temporalElement.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
3);"),
# 1.10.1.10.1.3.1 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> temporalElement -> EX_TemporalExtent
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'2e14a90b-0e59-41cf-b0e4-79d638931dc6', \
'392679b8-d426-4c74-8a5f-80ec53d31b69', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.temporalElement.EX_TemporalExtent', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.temporalElement.EX_TemporalExtent.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
1);"),
# 1.10.1.10.1.3.1.1 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> temporalElement -> EX_TemporalExtent -> extent
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'392679b8-d426-4c74-8a5f-80ec53d31b69', \
'6f616be3-98db-4153-b896-2955d4125c40', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.temporalElement.EX_TemporalExtent.extent', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.temporalElement.EX_TemporalExtent.extent.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
1);"),
# 1.10.1.10.1.3.1.1.1 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> temporalElement -> EX_TemporalExtent -> extent -> TimePeriod
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'6f616be3-98db-4153-b896-2955d4125c40', \
'83acc232-11bd-4cbe-a442-9e972524b969', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.temporalElement.EX_TemporalExtent.extent.TimePeriod', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.temporalElement.EX_TemporalExtent.extent.TimePeriod.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
True, \
null, \
1);"),
# 1.10.1.10.1.3.1.1.1.1 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> temporalElement -> EX_TemporalExtent -> extent -> TimePeriod -> id
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'83acc232-11bd-4cbe-a442-9e972524b969', \
'4bd2b2ad-7cc1-4955-b1f3-09900310ce19', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.temporalElement.EX_TemporalExtent.extent.TimePeriod.id', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.temporalElement.EX_TemporalExtent.extent.TimePeriod.id.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
1);"),
# 1.10.1.10.1.3.1.1.1.2 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> temporalElement -> EX_TemporalExtent -> extent -> TimePeriod -> description
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'83acc232-11bd-4cbe-a442-9e972524b969', \
'b23176c0-3415-4604-8c86-5bfdf20dda04', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.temporalElement.EX_TemporalExtent.extent.TimePeriod.description', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.temporalElement.EX_TemporalExtent.extent.TimePeriod.description.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
2);"),
# 1.10.1.10.1.3.1.1.1.3 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> temporalElement -> EX_TemporalExtent -> extent -> TimePeriod -> beginPosition
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'83acc232-11bd-4cbe-a442-9e972524b969', \
'844525ea-8780-4893-a24f-6d6ffc044092', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.temporalElement.EX_TemporalExtent.extent.TimePeriod.beginPosition', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.temporalElement.EX_TemporalExtent.extent.TimePeriod.beginPosition.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
3);"),
# 1.10.1.10.1.3.1.1.1.4 identificationInfo -> MD_DataIdentification -> extent -> EX_Extent -> temporalElement -> EX_TemporalExtent -> extent -> TimePeriod -> endPosition
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'83acc232-11bd-4cbe-a442-9e972524b969', \
'2c4ac1aa-0c41-440c-8e12-109681d56c79', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.temporalElement.EX_TemporalExtent.extent.TimePeriod.endPosition', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.extent.EX_Extent.temporalElement.EX_TemporalExtent.extent.TimePeriod.endPosition.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
False, \
null, \
4);"),
# 1.10.1.11 identificationInfo -> MD_DataIdentification -> supplementalInformation
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'28f1d340-c5b1-4be0-89ef-fe1fb327db86', \
'eabb2148-d540-48f0-beb3-a3978b950e57', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.supplementalInformation', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.supplementalInformation.help', \
'e2c6f819-5ef3-4104-ae04-480f3fd48d73', \
'03bf8631-94a1-41e5-b91d-e8c7c4902125', \
False, \
null, \
11);"),
# 1.10.1.11.1 identificationInfo -> MD_DataIdentification -> supplementalInformation -> CharacterString
migrations.RunSQL("INSERT INTO rdm_fields (schema_id, parent_id, id, label, help, metadata_ui_type_id, metadata_value_type_id, many_values, choice_list_id, default_order) \
VALUES ( \
'b36b79b2-55bd-4e90-ad0e-b1b72086c2d6', \
'eabb2148-d540-48f0-beb3-a3978b950e57', \
'269f935b-ac7c-45ea-89e7-db0c91f30b33', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.supplementalInformation.CharacterString', \
'en.metadata.iso.19115.gmd.identificationInfo.MD_DataIdentification.supplementalInformation.CharacterString.help', \
'9a8b45b0-ed13-4524-8f7d-0059ec3c156e', \
'e079c97d-9f5c-4dbc-8acb-b21c21827f1b', \
True, \
null, \
1);"),
]
| 67.663248 | 215 | 0.63461 | 16,219 | 158,332 | 5.997842 | 0.063259 | 0.027467 | 0.044501 | 0.061617 | 0.963887 | 0.906784 | 0.894725 | 0.879532 | 0.871504 | 0.837858 | 0 | 0.167185 | 0.249962 | 158,332 | 2,339 | 216 | 67.692176 | 0.651973 | 0.120014 | 0 | 0.791209 | 0 | 0.191808 | 0.472527 | 0.471923 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.0005 | 0 | 0.001998 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
bfb5771b6daafdc895059d74068864d2895addce | 122 | py | Python | platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/sol/calculators/calc_ber.py | PascalGuenther/gecko_sdk | 2e82050dc8823c9fe0e8908c1b2666fb83056230 | [
"Zlib"
] | 82 | 2016-06-29T17:24:43.000Z | 2021-04-16T06:49:17.000Z | platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/sol/calculators/calc_ber.py | PascalGuenther/gecko_sdk | 2e82050dc8823c9fe0e8908c1b2666fb83056230 | [
"Zlib"
] | 6 | 2022-01-12T18:22:08.000Z | 2022-03-25T10:19:27.000Z | platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/sol/calculators/calc_ber.py | PascalGuenther/gecko_sdk | 2e82050dc8823c9fe0e8908c1b2666fb83056230 | [
"Zlib"
] | 56 | 2016-08-02T10:50:50.000Z | 2021-07-19T08:57:34.000Z | from pyradioconfig.parts.ocelot.calculators.calc_ber import CALC_Ber_Ocelot
class Calc_BER_Sol(CALC_Ber_Ocelot):
pass | 30.5 | 75 | 0.852459 | 19 | 122 | 5.105263 | 0.578947 | 0.28866 | 0.268041 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.090164 | 122 | 4 | 76 | 30.5 | 0.873874 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 8 |
bfc40244d4116e9a175b7a532be8ae059277af0e | 8,442 | py | Python | pykrev/tests/test_diversity_unittest.py | Kzra/pykrev | 1a328fccded962f309e951c8509b87a82c3d3ae6 | [
"MIT"
] | 4 | 2021-02-18T10:19:13.000Z | 2021-10-04T16:17:30.000Z | pykrev/tests/test_diversity_unittest.py | erikafreeman/pykrev | 1a328fccded962f309e951c8509b87a82c3d3ae6 | [
"MIT"
] | null | null | null | pykrev/tests/test_diversity_unittest.py | erikafreeman/pykrev | 1a328fccded962f309e951c8509b87a82c3d3ae6 | [
"MIT"
] | 1 | 2021-09-23T16:03:03.000Z | 2021-09-23T16:03:03.000Z | import unittest
import numpy as np
from pykrev import diversity_indices, ordination_matrix, bray_curtis_matrix, compound_class, normalise_intensity
class TestDIVERSITY(unittest.TestCase):
    """Exercise pykrev normalisation, diversity, ordination and compound-class helpers.

    Several of these are smoke tests: they only check that the call completes
    without raising, since the expected numeric output is not pinned here.
    """

    def setUp(self):
        self.maxDiff = None
        # Fresh fixtures are rebuilt before every test so that accidental
        # in-place mutation inside the library cannot leak between tests.
        self.formulas = ['C13H14O5', 'C13H14N2O4S2', 'C36H45ClN6O12', 'C9H11NO2',
                         'C9H11NO3', 'C11H12N2O2', 'C5H7NO3', 'C5H9NO3',
                         'C6H12N2O4S2', 'C6H11NO3S']
        # Same list with three altered formulas; used by the ordination tests.
        self.formulas_alt = ['C13H14O5', 'C13H14N2O4S2', 'C36H45ClN6O12', 'C9H11NO2',
                             'C9H31NO3', 'C11H12N1O2', 'C5H73O3', 'C5H9NO3',
                             'C6H12N2O4S2', 'C6H11NO3S']
        self.intensities = np.array([1000, 2432, 3000, 4201, 2000, 5990, 1000, 6520, 8000, 9001])
        self.intensities_alt = self.intensities.copy()

    def test_sum_relative_intensity(self):
        """Default normalisation divides each intensity by the total."""
        raw = np.array([100, 200, 300])
        expected = np.array([100, 200, 300]) / 600
        result = normalise_intensity(raw)
        np.testing.assert_array_equal(np.round(result, 3), np.round(expected, 3))

    def test_max_relative_intensity(self):
        """'max' normalisation divides each intensity by the largest value."""
        raw = np.array([100, 200, 300])
        expected = np.array([100, 200, 300]) / 300
        result = normalise_intensity(raw, norm_method='max')
        np.testing.assert_array_equal(np.round(result, 3), np.round(expected, 3))

    def test_unit_relative_intensity(self):
        """'unit_vector' normalisation yields a vector with unit squared norm."""
        result = normalise_intensity(np.array([100, 200, 300]), norm_method='unit_vector')
        self.assertEqual(np.round((result ** 2).sum(), 3), 1.0)

    def test_zscore_relative_intensity(self):
        """'zscore' normalisation yields zero mean and unit standard deviation."""
        raw = np.array([100, 200, 300, 400, 21, 321, 342, 543])
        result = normalise_intensity(raw, norm_method='zscore')
        self.assertEqual(np.round(np.mean(result), 3), 0.0)
        self.assertEqual(np.round(np.std(result), 3), 1.0)

    def test_minmax_relative_intensity(self):
        """'minmax' normalisation rescales values into [0, 1]."""
        raw = np.array([100, 200, 300, 400, 21, 321, 342, 543])
        result = normalise_intensity(raw, norm_method='minmax')
        self.assertEqual(result.min(), 0)
        self.assertEqual(result.max(), 1)

    def test_mean_relative_intensity(self):
        """'mean' normalisation centres the values on zero."""
        raw = np.array([100, 200, 300, 400, 21, 321, 342, 543])
        result = normalise_intensity(raw, norm_method='mean')
        self.assertEqual(np.round(np.mean(result), 3), 0.0)

    def test_median_relative_intensity(self):
        """'median' normalisation centres the median on zero."""
        raw = np.array([100, 200, 300, 400, 21, 321, 342, 543])
        result = normalise_intensity(raw, norm_method='median')
        self.assertEqual(np.round(np.median(result), 3), 0.0)

    def test_binary_relative_intensity(self):
        """'binary' normalisation maps nonzero entries to 1 (six here)."""
        raw = np.array([100, 0, 300, 400, 21, 321, 0, 543])
        result = normalise_intensity(raw, norm_method='binary')
        self.assertEqual(result.sum(), 6)

    def test_richness(self):
        """Molecular richness ('r') counts the number of formulas."""
        result = diversity_indices(self.formulas, self.intensities, indices=['r'])
        self.assertEqual(result, {'D_r': 10})

    def test_GS(self):
        """Gini-Simpson abundance diversity matches the precomputed value."""
        result = diversity_indices(self.formulas, self.intensities, indices=['GS'])
        self.assertEqual(np.around(result['D_a_GS'], 3), np.around(0.8593, 3))

    def test_SW(self):
        """Shannon-Wiener abundance diversity matches the precomputed value."""
        result = diversity_indices(self.formulas, self.intensities, indices=['SW'])
        self.assertEqual(np.around(result['D_a_SW'], 3), np.around(2.09, 3))

    def test_functionalC(self):
        # Smoke test: 'N' functional diversity should compute without error.
        diversity_indices(self.formulas, self.intensities, indices=['N'])

    def test_functionalNC(self):
        # Smoke test: 'NC' functional diversity should compute without error.
        diversity_indices(self.formulas, self.intensities, indices=['NC'])

    def test_functionalrAI(self):
        # Smoke test: 'rAI' functional diversity should compute without error.
        diversity_indices(self.formulas, self.intensities, indices=['rAI'])

    def test_functionalmz(self):
        # Smoke test: 'mz' functional diversity requires an mz_list argument.
        masses = np.array([232, 340, 132, 904, 321, 431, 3424, 200, 3204, 1000])
        diversity_indices(self.formulas, self.intensities, mz_list=masses, indices=['mz'])

    def test_ordination_matrix(self):
        # Smoke test: build an ordination matrix from two overlapping samples.
        ordination_matrix(molecular_formulas=[self.formulas, self.formulas_alt],
                          peak_intensities=[self.intensities, self.intensities_alt])

    def test_normalise_ordination(self):
        # Smoke test: every subset/method combination should run on a matrix.
        matrix = ordination_matrix(molecular_formulas=[self.formulas, self.formulas_alt],
                                   peak_intensities=[self.intensities, self.intensities_alt])
        normalise_intensity(matrix)
        normalise_intensity(matrix, norm_subset='PPP', norm_method='binary')
        normalise_intensity(matrix, norm_subset='LOS', p_L=3, norm_method='minmax')
        normalise_intensity(matrix, norm_subset='PPP', p_P=0.73, norm_method='zscore')
        normalise_intensity(matrix, norm_subset='PPP', p_P=0.02, norm_method='mean')
        normalise_intensity(matrix, norm_subset='LOS', p_P=0.02, norm_method='mean', p_L=1000)
        normalise_intensity(matrix, norm_subset='LOS', p_P=0.02, norm_method='mean', p_L=1000, log=True)
        normalise_intensity(matrix, norm_subset='ALL', p_P=0.02, norm_method='none', p_L=1000, log=True)

    def test_bray_curtis_matrix(self):
        # Smoke test: Bray-Curtis dissimilarity over the ordination matrix.
        matrix = ordination_matrix(molecular_formulas=[self.formulas, self.formulas_alt],
                                   peak_intensities=[self.intensities, self.intensities_alt])
        bray_curtis_matrix(np.array(matrix))

    def test_compound_class_MSCC(self):
        # Smoke test: MSCC compound-class assignment needs a mass list.
        compound_class(self.formulas, mass_list=self.intensities, method='MSCC')

    def test_compound_class_KELL(self):
        # Smoke test: KELL compound-class assignment from formulas alone.
        compound_class(self.formulas, method='KELL')

    def test_compound_class_FORM(self):
        # Smoke test: FORM compound-class assignment from formulas alone.
        compound_class(self.formulas, method='FORM')

    def test_compound_class_KEGG(self):
        # Smoke test: KEGG_All compound-class assignment from formulas alone.
        compound_class(self.formulas, method='KEGG_All')
if __name__ == '__main__':
    # Allow running this test module directly: discovers and runs TestDIVERSITY.
    unittest.main()
| 57.040541 | 142 | 0.653163 | 1,092 | 8,442 | 4.909341 | 0.134615 | 0.037866 | 0.03283 | 0.130013 | 0.808058 | 0.781384 | 0.745756 | 0.700616 | 0.700616 | 0.679351 | 0 | 0.258926 | 0.170576 | 8,442 | 147 | 143 | 57.428571 | 0.506712 | 0 | 0 | 0.393443 | 0 | 0 | 0.208244 | 0 | 0 | 0 | 0 | 0 | 0.106557 | 1 | 0.188525 | false | 0.008197 | 0.02459 | 0 | 0.221311 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
781cee16fad62a0815b33d61c7fb8140c7f27fd7 | 3,243 | py | Python | tests/file_iterators/test_pidgin.py | mtlynch/chat_unifier | 7449ca4f2dd48d8b76fc29e150643076dd0b3334 | [
"MIT"
] | 2 | 2018-10-16T18:39:06.000Z | 2019-01-22T01:38:09.000Z | tests/file_iterators/test_pidgin.py | mtlynch/chat_unifier | 7449ca4f2dd48d8b76fc29e150643076dd0b3334 | [
"MIT"
] | 2 | 2018-10-19T00:00:21.000Z | 2018-10-19T17:15:25.000Z | tests/file_iterators/test_pidgin.py | mtlynch/chat_unifier | 7449ca4f2dd48d8b76fc29e150643076dd0b3334 | [
"MIT"
] | null | null | null | import os
import unittest
import mock
from chat_unifier.file_iterators import pidgin
class PidginFileIteratorTest(unittest.TestCase):
    """Verify which files pidgin.iterate_files yields from a faked log tree."""

    def setUp(self):
        self.maxDiff = None

    def _files_for_tree(self, tree):
        """Return the paths iterate_files('/logs') yields when os.walk reports *tree*."""
        with mock.patch.object(os, 'walk') as walk_stub:
            walk_stub.return_value = tree
            return [path for path in pidgin.iterate_files('/logs')]

    def test_picks_correct_log_files(self):
        """Per-contact HTML logs are yielded; stray files like README.txt are not."""
        tree = [
            ('/logs', ('aim',), ('README.txt',)),
            ('/logs/aim', ('LocalUser123',), ()),
            ('/log/aim/LocalUser123', ('RemoteUser345', 'RemoteUser456'), ()),
            ('/log/aim/LocalUser123/RemoteUser345', (),
             ('2007-02-24.020826-0500EST.html', '2007-02-25.154550-0500EST.html')),
            ('/log/aim/LocalUser123/RemoteUser456', (),
             ('2006-11-19.195755-0500EST.html', '2006-11-22.112333-0500EST.html')),
        ]
        expected = [
            '/log/aim/LocalUser123/RemoteUser345/2007-02-24.020826-0500EST.html',
            '/log/aim/LocalUser123/RemoteUser345/2007-02-25.154550-0500EST.html',
            '/log/aim/LocalUser123/RemoteUser456/2006-11-19.195755-0500EST.html',
            '/log/aim/LocalUser123/RemoteUser456/2006-11-22.112333-0500EST.html',
        ]
        self.assertEqual(expected, self._files_for_tree(tree))

    def test_ignores_irc_log_files(self):
        """Logs under the irc protocol directory are skipped entirely."""
        tree = [
            ('/logs', ('aim', 'irc'), ('README.txt',)),
            ('/logs/aim', ('LocalUser123',), ()),
            ('/log/aim/LocalUser123', ('RemoteUser345',), ()),
            ('/log/aim/LocalUser123/RemoteUser345', (),
             ('2007-02-24.020826-0500EST.html', '2007-02-25.154550-0500EST.html')),
            ('/log/irc', ('localuser123@irc.freenode.net',), ()),
            ('/log/irc/localuser123@irc.freenode.net', ('#dummy.chat',), ()),
            ('/log/irc/localuser123@irc.freenode.net/#dummy.chat', (),
             ('2006-06-21.200806-0400EST.html',)),
        ]
        expected = [
            '/log/aim/LocalUser123/RemoteUser345/2007-02-24.020826-0500EST.html',
            '/log/aim/LocalUser123/RemoteUser345/2007-02-25.154550-0500EST.html',
        ]
        self.assertEqual(expected, self._files_for_tree(tree))

    def test_ignores_system_log_files(self):
        """Logs in the hidden .system directory are skipped."""
        tree = [
            ('/logs', ('aim',), ()),
            ('/logs/aim', ('LocalUser123',), ()),
            ('/log/aim/LocalUser123', ('RemoteUser345', '.system'), ()),
            ('/log/aim/LocalUser123/RemoteUser345', (),
             ('2007-02-24.020826-0500EST.html', '2007-02-25.154550-0500EST.html')),
            ('/log/aim/LocalUser123/.system', (),
             ('2007-03-05.231324-0500EST.html',)),
        ]
        expected = [
            '/log/aim/LocalUser123/RemoteUser345/2007-02-24.020826-0500EST.html',
            '/log/aim/LocalUser123/RemoteUser345/2007-02-25.154550-0500EST.html',
        ]
        self.assertEqual(expected, self._files_for_tree(tree))
| 45.676056 | 85 | 0.531915 | 329 | 3,243 | 5.164134 | 0.209726 | 0.167746 | 0.169511 | 0.218952 | 0.862861 | 0.862861 | 0.832843 | 0.803414 | 0.733373 | 0.66392 | 0 | 0.190601 | 0.291397 | 3,243 | 70 | 86 | 46.328571 | 0.548738 | 0 | 0 | 0.516129 | 0 | 0 | 0.429232 | 0.362936 | 0 | 0 | 0 | 0 | 0.048387 | 1 | 0.064516 | false | 0 | 0.064516 | 0 | 0.145161 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
78297856fa436760265c7475cfefca210304edd7 | 689 | py | Python | gameComponents/compareGame.py | mehta-asim/Mehta_A_RPS_Fall2020 | c07b9bb749076bdf181f15acb6be9a5bc8cc0825 | [
"MIT"
] | null | null | null | gameComponents/compareGame.py | mehta-asim/Mehta_A_RPS_Fall2020 | c07b9bb749076bdf181f15acb6be9a5bc8cc0825 | [
"MIT"
] | null | null | null | gameComponents/compareGame.py | mehta-asim/Mehta_A_RPS_Fall2020 | c07b9bb749076bdf181f15acb6be9a5bc8cc0825 | [
"MIT"
] | null | null | null | from gameComponents import gameVars, winLose
def rpscompare(c, p):
    """Resolve one rock-paper-scissors round between computer and player.

    Args:
        c: the computer's throw ("rock", "paper" or "scissors").
        p: the player's throw ("rock", "paper" or "scissors").

    Side effects: prints the outcome and decrements gameVars.player_lives
    or gameVars.computer_lives for the round's loser (no change on a tie).

    Fix: the original 'paper' branch was copy-pasted from the 'rock' branch,
    so the player lost with scissors against paper (and won with rock against
    paper) — inverted relative to the rules of the game.
    """
    # Each throw maps to the throw it defeats.
    beats = {"rock": "scissors", "paper": "rock", "scissors": "paper"}
    if c == p:
        print("tie")
    elif c not in beats:
        # Unknown computer throw: keep the original silent no-op behaviour.
        pass
    elif beats[c] == p:
        # Computer's throw beats the player's: the player loses a life.
        gameVars.player_lives -= 1
        print("you lose! player lives: ", gameVars.player_lives)
    else:
        print("you win!")
        gameVars.computer_lives -= 1
784cefad0faeb2ba34d9dbd8aaca8bc6168183e9 | 19,136 | py | Python | gnn_wrapper.py | FaezehAmou2020/torch_gnn | 996a7f94259e718c625c6b4594729f025c4e4f14 | [
"BSD-3-Clause"
] | null | null | null | gnn_wrapper.py | FaezehAmou2020/torch_gnn | 996a7f94259e718c625c6b4594729f025c4e4f14 | [
"BSD-3-Clause"
] | null | null | null | gnn_wrapper.py | FaezehAmou2020/torch_gnn | 996a7f94259e718c625c6b4594729f025c4e4f14 | [
"BSD-3-Clause"
] | null | null | null | import torch
import torch.nn as nn
import torch.nn.functional as F
import dataloader
import torch.optim as optim
from abc import ABCMeta, abstractmethod
from utils import Accuracy
from torch.utils.tensorboard import SummaryWriter
import torchvision
from utils import matplotlib_imshow
import utils
from pygnn import GNN
import os
class GNNWrapper:
    """Owns a GNN model together with its optimizer, loss, accuracy
    trackers and logging (console, shared text file, tensorboard) for
    full-batch node-classification training.

    Typical use: build a ``Config``, construct the wrapper, then call the
    wrapper with a dataset to instantiate the model and training state.
    """

    class Config:
        """Mutable bag of runtime settings and model/training hyperparameters."""

        def __init__(self):
            # runtime / bookkeeping
            self.device = None
            self.use_cuda = None
            self.dataset_path = None
            self.log_interval = None
            self.tensorboard = None
            # hyperparams
            self.lrw = None                       # learning rate for the weights
            self.loss_f = None
            self.epochs = None
            self.convergence_threshold = None     # state-relaxation stop criterion
            self.max_iterations = None            # cap on state-relaxation iterations
            self.n_nodes = None                   # filled in from the dataset
            self.state_dim = None
            self.label_dim = None                 # filled in from the dataset
            self.output_dim = None                # filled in from the dataset
            self.graph_based = False
            self.activation = torch.nn.Tanh()
            self.state_transition_hidden_dims = None
            self.output_function_hidden_dims = None
            self.task_type = "semisupervised"
            self.state_net = None
            self.dset_name = None
            self.aggregation_type = None

    def __init__(self, config: Config):
        """Store the config and prepare the logging sinks.

        The model, optimizer, criterion and accuracy trackers are created
        later, in ``__call__``, once the dataset (and hence the input and
        output dimensions) is known.
        """
        self.config = config
        # populated by __call__
        self.optimizer = None
        self.criterion = None
        self.train_loader = None
        self.test_loader = None
        self.state_net = config.state_net
        if self.config.tensorboard:
            # one run directory per (learning rate, state dim, aggregation net)
            self.writer = SummaryWriter(log_dir=os.path.join(
                "logs",
                f"lrw__{self.config.lrw}_state__{self.config.state_dim}",
                f"{str(self.state_net).split('(')[0]}"))
        self.first_flag_writer = True
        # header line identifying this run in the shared text log
        self._append_log(
            f" *#*#*#*#*# Logs for: Dataset:{self.config.dset_name}, learning_rate: {self.config.lrw}, state_dim:{self.config.state_dim}, aggregation function:{str(self.state_net).split('(')[0]} , aggregation type:{self.config.aggregation_type} *#*#*#*#*# \n")

    @staticmethod
    def _append_log(text):
        """Append ``text`` to the shared text log.

        The context manager guarantees the file is flushed and closed even
        if the write raises (the previous open/write/close pattern leaked
        the handle on error).
        """
        with open("txtlogs.txt", "a") as log_file:
            log_file.write(text)

    def __call__(self, dset, state_net=None, out_net=None):
        """Bind a dataset and build the model, criterion, optimizer and accuracies."""
        self._data_loader(dset)
        self.gnn = GNN(self.config, state_net, out_net).to(self.config.device)
        self._criterion()
        self._optimizer()
        self._accuracy()

    def _data_loader(self, dset):
        """Move the dataset to the target device and copy its metadata into config."""
        self.dset = dset.to(self.config.device)
        self.config.label_dim = self.dset.node_label_dim
        self.config.n_nodes = self.dset.num_nodes
        self.config.output_dim = self.dset.num_classes

    def _optimizer(self):
        # Adam over all model parameters with the configured learning rate.
        self.optimizer = optim.Adam(self.gnn.parameters(), lr=self.config.lrw)

    def _criterion(self):
        self.criterion = nn.CrossEntropyLoss()

    def _accuracy(self):
        """One accuracy tracker per split, configured for the task type."""
        self.TrainAccuracy = Accuracy(type=self.config.task_type)
        self.ValidAccuracy = Accuracy(type=self.config.task_type)
        self.TestAccuracy = Accuracy(type=self.config.task_type)

    def train_step(self, epoch):
        """Run one full-batch gradient step; log and return training accuracy."""
        self.gnn.train()
        data = self.dset
        self.optimizer.zero_grad()
        self.TrainAccuracy.reset()
        # forward pass: also returns the number of state-relaxation iterations
        output, iterations = self.gnn(data.edges, data.agg_matrix, data.node_labels)
        loss = self.criterion(output, data.targets)
        loss.backward()
        self.optimizer.step()
        with torch.no_grad():  # accuracy computation only, no autograd needed
            self.TrainAccuracy.update(output, data.targets)
            accuracy_train = self.TrainAccuracy.compute()
            if epoch % self.config.log_interval == 0:
                # same message goes to stdout and to the text log
                msg = 'Train Epoch: {} \t Mean Loss: {:.6f}\tAccuracy Full Batch: {:.6f} \t Best Accuracy : {:.6f} \t Iterations: {}'.format(
                    epoch, loss, accuracy_train, self.TrainAccuracy.get_best(), iterations)
                print(msg)
                self._append_log(msg + ' \n')
                if self.config.tensorboard:
                    self.writer.add_scalar(
                        f'Training Accuracy_{self.config.dset_name}/aggregation_{self.config.aggregation_type}',
                        accuracy_train,
                        epoch)
                    self.writer.add_scalar(
                        f'Training Loss_{self.config.dset_name}/aggregation_{self.config.aggregation_type}',
                        loss,
                        epoch)
                    self.writer.add_scalar(
                        f'Training Iterations_{self.config.dset_name}/aggregation_{self.config.aggregation_type}',
                        iterations,
                        epoch)
                    for name, param in self.gnn.named_parameters():
                        self.writer.add_histogram(name, param, epoch)
        return accuracy_train

    def predict(self, edges, agg_matrix, node_labels):
        """Forward pass only; returns (output, iterations)."""
        return self.gnn(edges, agg_matrix, node_labels)

    def test_step(self, epoch):
        """Evaluate on the full graph without gradients; log and return test accuracy."""
        self.gnn.eval()
        data = self.dset
        self.TestAccuracy.reset()
        with torch.no_grad():
            output, iterations = self.gnn(data.edges, data.agg_matrix, data.node_labels)
            test_loss = self.criterion(output, data.targets)
            self.TestAccuracy.update(output, data.targets)
            acc_test = self.TestAccuracy.compute()
            if epoch % self.config.log_interval == 0:
                msg = 'Test set: Average loss: {:.4f}, Accuracy: ({:.4f}%) , Best Accuracy: ({:.4f}%)'.format(
                    test_loss, acc_test, self.TestAccuracy.get_best())
                print(msg)
                self._append_log(msg + ' \n')
                if self.config.tensorboard:
                    self.writer.add_scalar(
                        f'Test Accuracy_{self.config.dset_name}/aggregation_{self.config.aggregation_type}',
                        acc_test,
                        epoch)
                    self.writer.add_scalar(
                        f'Test Loss_{self.config.dset_name}/aggregation_{self.config.aggregation_type}',
                        test_loss,
                        epoch)
                    self.writer.add_scalar(
                        f'Test Iterations_{self.config.dset_name}/aggregation_{self.config.aggregation_type}',
                        iterations,
                        epoch)
        return acc_test

    def valid_step(self, epoch):
        """Evaluate on the validation set; log and return validation accuracy."""
        self.gnn.eval()
        data = self.dset
        self.ValidAccuracy.reset()
        with torch.no_grad():
            output, iterations = self.gnn(data.edges, data.agg_matrix, data.node_labels)
            test_loss = self.criterion(output, data.targets)
            self.ValidAccuracy.update(output, data.targets)
            acc_valid = self.ValidAccuracy.compute()
            if epoch % self.config.log_interval == 0:
                msg = 'Valid set: Average loss: {:.4f}, Accuracy: ({:.4f}%) , Best Accuracy: ({:.4f}%)'.format(
                    test_loss, acc_valid, self.ValidAccuracy.get_best())
                print(msg)
                self._append_log(msg + ' \n')
                if self.config.tensorboard:
                    self.writer.add_scalar(
                        f'Valid Accuracy_{self.config.dset_name}/aggregation_{self.config.aggregation_type}',
                        acc_valid,
                        epoch)
                    self.writer.add_scalar(
                        f'Valid Loss_{self.config.dset_name}/aggregation_{self.config.aggregation_type}',
                        test_loss,
                        epoch)
                    self.writer.add_scalar(
                        f'Valid Iterations_{self.config.dset_name}/aggregation_{self.config.aggregation_type}',
                        iterations,
                        epoch)
        return acc_valid
class SemiSupGNNWrapper(GNNWrapper):
    """GNNWrapper variant for semi-supervised node classification.

    Loss and accuracy are restricted to the dataset's train/valid/test
    index masks (``idx_train`` / ``idx_valid`` / ``idx_test``) instead of
    the full node set.
    """

    class Config:
        """Mutable bag of runtime settings and model/training hyperparameters."""

        def __init__(self):
            self.device = None
            self.use_cuda = None
            self.dataset_path = None
            self.log_interval = None
            self.tensorboard = None
            self.task_type = None
            # hyperparams
            self.lrw = None
            self.loss_f = None
            self.epochs = None
            self.convergence_threshold = None
            self.max_iterations = None
            self.n_nodes = None
            self.state_dim = None
            self.label_dim = None
            self.output_dim = None
            self.graph_based = False
            self.activation = torch.nn.Tanh()
            self.state_transition_hidden_dims = None
            self.output_function_hidden_dims = None
            self.state_net = None
            self.dset_name = None
            self.aggregation_type = None

    def __init__(self, config: Config):
        super().__init__(config)

    @staticmethod
    def _append_log(text):
        """Append ``text`` to the shared text log; the context manager
        guarantees the handle is closed even if the write raises."""
        with open("txtlogs.txt", "a") as log_file:
            log_file.write(text)

    def _data_loader(self, dset):
        """Move the dataset to the target device and copy its metadata into config."""
        self.dset = dset.to(self.config.device)
        self.config.label_dim = self.dset.node_label_dim
        self.config.n_nodes = self.dset.num_nodes
        self.config.output_dim = self.dset.num_classes

    def _accuracy(self):
        # Accuracy is always computed on an index subset in this setting.
        self.TrainAccuracy = Accuracy(type="semisupervised")
        self.ValidAccuracy = Accuracy(type="semisupervised")
        self.TestAccuracy = Accuracy(type="semisupervised")

    def train_step(self, epoch):
        """One gradient step on the labelled training nodes; returns train accuracy."""
        self.gnn.train()
        data = self.dset
        self.optimizer.zero_grad()
        self.TrainAccuracy.reset()
        output, iterations = self.gnn(data.edges, data.agg_matrix, data.node_labels)
        # loss restricted to the labelled training nodes
        loss = self.criterion(output[data.idx_train], data.targets[data.idx_train])
        loss.backward()
        self.optimizer.step()
        with torch.no_grad():  # accuracy computation only
            self.TrainAccuracy.update(output, data.targets, idx=data.idx_train)
            accuracy_train = self.TrainAccuracy.compute()
            if epoch % self.config.log_interval == 0:
                msg = 'Train Epoch: {} \t Mean Loss: {:.6f}\tAccuracy Full Batch: {:.6f} \t Best Accuracy : {:.6f} \t Iterations: {}'.format(
                    epoch, loss, accuracy_train, self.TrainAccuracy.get_best(), iterations)
                print(msg)
                self._append_log(msg + ' \n')
                if self.config.tensorboard:
                    self.writer.add_scalar(
                        f'Training Accuracy_{self.config.dset_name}/aggregation_{self.config.aggregation_type}',
                        accuracy_train,
                        epoch)
                    self.writer.add_scalar(
                        f'Training Loss_{self.config.dset_name}/aggregation_{self.config.aggregation_type}',
                        loss,
                        epoch)
                    self.writer.add_scalar(
                        f'Training Iterations_{self.config.dset_name}/aggregation_{self.config.aggregation_type}',
                        iterations,
                        epoch)
                    for name, param in self.gnn.named_parameters():
                        self.writer.add_histogram(name, param, epoch)
                        self.writer.add_histogram("gradient " + name, param.grad, epoch)
        return accuracy_train

    def predict(self, edges, agg_matrix, node_labels):
        """Forward pass only; returns (output, iterations)."""
        return self.gnn(edges, agg_matrix, node_labels)

    def test_step(self, epoch):
        """Evaluate on the test-node subset; log and return test accuracy."""
        self.gnn.eval()
        data = self.dset
        self.TestAccuracy.reset()
        with torch.no_grad():
            output, iterations = self.gnn(data.edges, data.agg_matrix, data.node_labels)
            test_loss = self.criterion(output[data.idx_test], data.targets[data.idx_test])
            self.TestAccuracy.update(output, data.targets, idx=data.idx_test)
            acc_test = self.TestAccuracy.compute()
            if epoch % self.config.log_interval == 0:
                msg = 'Test set: Average loss: {:.4f}, Accuracy: ({:.4f}%) , Best Accuracy: ({:.4f}%)'.format(
                    test_loss, acc_test, self.TestAccuracy.get_best())
                print(msg)
                self._append_log(msg + ' \n')
                if self.config.tensorboard:
                    self.writer.add_scalar(
                        f'Test Accuracy_{self.config.dset_name}/aggregation_{self.config.aggregation_type}',
                        acc_test,
                        epoch)
                    self.writer.add_scalar(
                        f'Test Loss_{self.config.dset_name}/aggregation_{self.config.aggregation_type}',
                        test_loss,
                        epoch)
                    self.writer.add_scalar(
                        f'Test Iterations_{self.config.dset_name}/aggregation_{self.config.aggregation_type}',
                        iterations,
                        epoch)
        return acc_test

    def valid_step(self, epoch):
        """Evaluate on the validation-node subset; log and return validation accuracy."""
        self.gnn.eval()
        data = self.dset
        self.ValidAccuracy.reset()
        with torch.no_grad():
            output, iterations = self.gnn(data.edges, data.agg_matrix, data.node_labels)
            test_loss = self.criterion(output[data.idx_valid], data.targets[data.idx_valid])
            self.ValidAccuracy.update(output, data.targets, idx=data.idx_valid)
            acc_valid = self.ValidAccuracy.compute()
            if epoch % self.config.log_interval == 0:
                msg = 'Valid set: Average loss: {:.4f}, Accuracy: ({:.4f}%) , Best Accuracy: ({:.4f}%)'.format(
                    test_loss, acc_valid, self.ValidAccuracy.get_best())
                print(msg)
                self._append_log(msg + ' \n')
                if self.config.tensorboard:
                    self.writer.add_scalar(
                        f'Valid Accuracy_{self.config.dset_name}/aggregation_{self.config.aggregation_type}',
                        acc_valid,
                        epoch)
                    self.writer.add_scalar(
                        f'Valid Loss_{self.config.dset_name}/aggregation_{self.config.aggregation_type}',
                        test_loss,
                        epoch)
                    self.writer.add_scalar(
                        f'Valid Iterations_{self.config.dset_name}/aggregation_{self.config.aggregation_type}',
                        iterations,
                        epoch)
        return acc_valid
| 43.196388 | 262 | 0.552676 | 2,058 | 19,136 | 4.945092 | 0.086492 | 0.07173 | 0.028103 | 0.033605 | 0.848678 | 0.833546 | 0.812027 | 0.784416 | 0.780878 | 0.763978 | 0 | 0.004885 | 0.336695 | 19,136 | 442 | 263 | 43.294118 | 0.796896 | 0.116796 | 0 | 0.81759 | 0 | 0.042345 | 0.181532 | 0.093473 | 0 | 0 | 0 | 0 | 0 | 1 | 0.061889 | false | 0 | 0.042345 | 0.006515 | 0.143322 | 0.019544 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
786f11e84329a851f68befffe885310368e37266 | 16,385 | py | Python | src/the_tale/the_tale/accounts/tests/test_requests_profile.py | devapromix/the-tale | 2a10efd3270734f8cf482b4cfbc5353ef8f0494c | [
"BSD-3-Clause"
] | 1 | 2020-04-02T11:51:20.000Z | 2020-04-02T11:51:20.000Z | src/the_tale/the_tale/accounts/tests/test_requests_profile.py | devapromix/the-tale | 2a10efd3270734f8cf482b4cfbc5353ef8f0494c | [
"BSD-3-Clause"
] | null | null | null | src/the_tale/the_tale/accounts/tests/test_requests_profile.py | devapromix/the-tale | 2a10efd3270734f8cf482b4cfbc5353ef8f0494c | [
"BSD-3-Clause"
] | null | null | null |
import smart_imports
smart_imports.all()
class ProfileRequestsTests(utils_testcase.TestCase, third_party_helpers.ThirdPartyTestsMixin):
    """Request-level tests for the accounts profile views.

    Covers the profile show/edited pages, credential changes (nick, email,
    password) through ChangeCredentialsTask, the email-confirmation flow,
    the last-news-reminder endpoint and the profile settings endpoint.
    Every endpoint must also refuse third-party access tokens.
    """

    def setUp(self):
        # fresh map plus one registered (non-fast) account per test
        super(ProfileRequestsTests, self).setUp()
        game_logic.create_test_map()
        self.account = self.accounts_factory.create_account()
        self.account_nick = self.account.nick
        self.account_email = self.account.email

    # --- profile show / edited pages -------------------------------------

    def test_refuse_third_party__profile_page(self):
        self.request_third_party_token(account=self.account)
        self.check_html_ok(self.request_html(django_reverse('accounts:profile:show')), texts=['third_party.access_restricted'])

    def test_profile_page_unlogined(self):
        self.check_redirect(django_reverse('accounts:profile:show'), logic.login_page_url(django_reverse('accounts:profile:show')))

    def test_profile_page__fast_account(self):
        self.request_login(self.account.email)
        self.account.is_fast = True
        self.account.save()
        texts = [('pgf-fast-account-help-block', 1),
                 ('pgf-fast-account-user-agreement-block', 1)]
        self.check_html_ok(self.request_html(django_reverse('accounts:profile:show')), texts=texts)

    def test_profile_page__normal_account(self):
        self.request_login(self.account.email)
        texts = [('pgf-fast-account-help-block', 0),
                 ('pgf-fast-account-user-agreement-block', 0)]
        self.check_html_ok(self.request_html(django_reverse('accounts:profile:show')), texts=texts)

    def test_profile_page_logined(self):
        self.request_login(self.account.email)
        response = self.client.get(django_reverse('accounts:profile:show'))
        self.assertEqual(response.status_code, 200)

    def test_refuse_third_party__profile_edited(self):
        self.request_third_party_token(account=self.account)
        self.check_html_ok(self.request_html(django_reverse('accounts:profile:edited')), texts=['third_party.access_restricted'])

    def test_profile_edited(self):
        self.request_login(self.account.email)
        response = self.client.get(django_reverse('accounts:profile:edited'))
        self.assertEqual(response.status_code, 200)

    def test_profile_confirm_email_request(self):
        self.request_login(self.account.email)
        response = self.client.get(django_reverse('accounts:profile:confirm-email-request'))
        self.assertEqual(response.status_code, 200)

    def test_refuse_third_party__confirm_email_request(self):
        self.request_third_party_token(account=self.account)
        self.check_html_ok(self.request_html(django_reverse('accounts:profile:confirm-email-request')), texts=['third_party.access_restricted'])

    # --- credential updates ----------------------------------------------

    def test_profile_update_password(self):
        self.request_login(self.account.email)
        response = self.client.post(django_reverse('accounts:profile:update'), {'email': self.account.email, 'password': '222222', 'nick': 'test_user'})
        self.assertEqual(response.status_code, 200)
        self.check_ajax_processing(response, PostponedTaskPrototype._db_get_object(0).status_url)
        self.assertEqual(models.ChangeCredentialsTask.objects.all().count(), 1)
        self.assertEqual(models.ChangeCredentialsTask.objects.all()[0].state, relations.CHANGE_CREDENTIALS_TASK_STATE.CHANGING)

    def test_refuse_third_party__update(self):
        self.request_third_party_token(account=self.account)
        self.check_ajax_error(self.client.post(django_reverse('accounts:profile:update'),
                                               {'email': self.account.email, 'password': '222222', 'nick': 'test_user'}), 'third_party.access_restricted')

    def test_profile_update_nick(self):
        self.request_login(self.account.email)
        response = self.client.post(django_reverse('accounts:profile:update'), {'email': self.account.email, 'nick': 'test_nick'})
        self.assertEqual(response.status_code, 200)
        self.check_ajax_processing(response, PostponedTaskPrototype._db_get_object(0).status_url)
        self.assertEqual(models.ChangeCredentialsTask.objects.all().count(), 1)
        self.assertEqual(models.ChangeCredentialsTask.objects.all()[0].state, relations.CHANGE_CREDENTIALS_TASK_STATE.CHANGING)

    def test_profile_update_nick__banned(self):
        self.request_login(self.account.email)
        self.account.ban_forum(1)
        with self.check_not_changed(models.ChangeCredentialsTask.objects.all().count):
            self.check_ajax_error(self.client.post(django_reverse('accounts:profile:update'),
                                                   {'email': self.account.email, 'nick': 'test_nick'}),
                                  'accounts.profile.update.banned')

    def test_profile_update_email(self):
        self.request_login(self.account.email)
        response = self.client.post(django_reverse('accounts:profile:update'), {'email': 'test_user@test.ru', 'nick': 'test_nick'})
        self.assertEqual(response.status_code, 200)
        self.check_ajax_ok(response, data={'next_url': django_reverse('accounts:profile:confirm-email-request')})
        self.assertEqual(models.ChangeCredentialsTask.objects.all().count(), 1)
        self.assertEqual(models.ChangeCredentialsTask.objects.all()[0].state, relations.CHANGE_CREDENTIALS_TASK_STATE.EMAIL_SENT)
        self.assertEqual(post_service_models.Message.objects.all().count(), 1)
        # old credentials must keep working until the new email is confirmed
        # ('111111' is assumed to be the account factory's default password — TODO confirm)
        self.assertEqual(django_auth.authenticate(nick=self.account_nick, password='111111').id, self.account.id)
        self.assertEqual(django_auth.authenticate(nick=self.account_nick, password='111111').email, self.account_email)

    def test_profile_update_duplicate_email(self):
        account = self.accounts_factory.create_account()
        self.request_login(self.account.email)
        response = self.client.post(django_reverse('accounts:profile:update'), {'nick': 'duplicated_user_2', 'email': account.email})
        self.check_ajax_error(response, 'accounts.profile.update.used_email')
        self.assertEqual(models.ChangeCredentialsTask.objects.all().count(), 0)
        self.assertEqual(post_service_models.Message.objects.all().count(), 0)
        self.assertEqual(django_auth.authenticate(nick=self.account_nick, password='111111').id, self.account.id)
        self.assertEqual(django_auth.authenticate(nick=self.account_nick, password='111111').email, self.account_email)

    def test_profile_update_duplicate_nick(self):
        account = self.accounts_factory.create_account()
        self.request_login(self.account.email)
        response = self.client.post(django_reverse('accounts:profile:update'), {'nick': account.nick, 'email': 'duplicated_@test.com'})
        self.check_ajax_error(response, 'accounts.profile.update.used_nick')
        self.assertEqual(models.ChangeCredentialsTask.objects.all().count(), 0)
        self.assertEqual(post_service_models.Message.objects.all().count(), 0)
        self.assertEqual(django_auth.authenticate(nick=self.account_nick, password='111111').id, self.account.id)
        self.assertEqual(django_auth.authenticate(nick=self.account_nick, password='111111').email, self.account_email)

    def test_profile_update_fast_errors(self):
        # a fast account must supply email, password AND nick together
        response = self.client.post(django_reverse('accounts:registration:fast'))
        PostponedTaskPrototype(model=PostponedTask.objects.all()[0]).process(utils_fake.FakeLogger())
        response = self.client.post(django_reverse('accounts:profile:update'), {'email': 'test_user@test.ru'})
        self.check_ajax_error(response, 'accounts.profile.update.form_errors')
        response = self.client.post(django_reverse('accounts:profile:update'), {'password': '111111'})
        self.check_ajax_error(response, 'accounts.profile.update.form_errors')
        response = self.client.post(django_reverse('accounts:profile:update'), {'nick': 'test_nick'})
        self.check_ajax_error(response, 'accounts.profile.update.form_errors')
        response = self.client.post(django_reverse('accounts:profile:update'), {'email': 'test_user@test.ru', 'nick': 'test_nick'})
        self.check_ajax_error(response, 'accounts.profile.update.empty_fields')
        response = self.client.post(django_reverse('accounts:profile:update'), {'email': 'test_user@test.ru', 'password': '111111'})
        self.check_ajax_error(response, 'accounts.profile.update.form_errors')
        response = self.client.post(django_reverse('accounts:profile:update'), {'password': '111111', 'nick': 'test_nick'})
        self.check_ajax_error(response, 'accounts.profile.update.form_errors')
        self.assertEqual(models.ChangeCredentialsTask.objects.all().count(), 0)
        self.assertEqual(post_service_models.Message.objects.all().count(), 0)

    # --- email confirmation ----------------------------------------------

    def test_profile_confirm_email(self):
        self.request_login(self.account.email)
        self.client.post(django_reverse('accounts:profile:update'), {'email': 'test_user@test.ru', 'nick': 'test_nick'})
        self.assertEqual(PostponedTaskPrototype._model_class.objects.all().count(), 0)
        uuid = models.ChangeCredentialsTask.objects.all()[0].uuid
        response = self.client.get(django_reverse('accounts:profile:confirm-email') + '?uuid=' + uuid)
        self.check_response_redirect(response, PostponedTaskPrototype._db_get_object(0).wait_url)
        self.assertEqual(models.ChangeCredentialsTask.objects.all().count(), 1)
        self.assertEqual(models.ChangeCredentialsTask.objects.all()[0].state, relations.CHANGE_CREDENTIALS_TASK_STATE.CHANGING)
        self.assertEqual(post_service_models.Message.objects.all().count(), 1)

    def test_refuse_third_party__confirm_email(self):
        self.request_login(self.account.email)
        self.client.post(django_reverse('accounts:profile:update'), {'email': 'test_user@test.ru', 'nick': 'test_nick'})
        self.assertEqual(PostponedTaskPrototype._model_class.objects.all().count(), 0)
        uuid = models.ChangeCredentialsTask.objects.all()[0].uuid
        self.request_third_party_token(account=self.account)
        self.check_ajax_error(self.client.get(django_reverse('accounts:profile:confirm-email') + '?uuid=' + uuid), 'third_party.access_restricted')

    def test_fast_profile_confirm_email(self):
        self.client.post(django_reverse('accounts:registration:fast'))
        PostponedTaskPrototype(model=PostponedTask.objects.all()[0]).process(utils_fake.FakeLogger())
        self.client.post(django_reverse('accounts:profile:update'), {'email': 'test_user@test.ru', 'nick': 'test_nick', 'password': '123456'})
        self.assertEqual(post_service_models.Message.objects.all().count(), 1)
        uuid = models.ChangeCredentialsTask.objects.all()[0].uuid
        response = self.client.get(django_reverse('accounts:profile:confirm-email') + '?uuid=' + uuid)
        self.check_response_redirect(response, PostponedTaskPrototype._db_get_object(1).wait_url)
        self.assertEqual(models.ChangeCredentialsTask.objects.all().count(), 1)
        self.assertEqual(models.ChangeCredentialsTask.objects.all()[0].state, relations.CHANGE_CREDENTIALS_TASK_STATE.CHANGING)
        # credentials must not apply before the task finishes
        self.assertEqual(django_auth.authenticate(nick='test_nick', password='123456'), None)

    def test_profile_confirm_email_for_unlogined(self):
        self.request_login(self.account.email)
        self.client.post(django_reverse('accounts:profile:update'), {'email': 'test_user@test.ru', 'nick': 'test_nick'})
        self.request_logout()
        uuid = models.ChangeCredentialsTask.objects.all()[0].uuid
        response = self.client.get(django_reverse('accounts:profile:confirm-email') + '?uuid=' + uuid)
        self.check_response_redirect(response, PostponedTaskPrototype._db_get_object(0).wait_url)

    def test_confirm_email__wrong_task(self):
        self.request_login(self.account.email)
        self.client.post(django_reverse('accounts:profile:update'), {'email': 'test_user@test.ru', 'nick': 'test_nick'})
        # FIX: texts= was passed to client.get (swallowed as a WSGI extra) instead
        # of check_html_ok, so the page-content assertion never actually ran.
        self.check_html_ok(self.client.get(dext_urls.url('accounts:profile:confirm-email', uuid='wronguuid')), texts=['pgf-change-credentials-wrong-link'])

    def test_confirm_email__already_processed(self):
        self.request_login(self.account.email)
        self.client.post(django_reverse('accounts:profile:update'), {'email': 'test_user@test.ru', 'nick': 'test_nick'})
        task = prototypes.ChangeCredentialsTaskPrototype._db_get_object(0)
        task._model.state = relations.CHANGE_CREDENTIALS_TASK_STATE.PROCESSED
        task._model.save()
        # FIX: texts= moved out of client.get and into check_html_ok (see above)
        self.check_html_ok(self.client.get(dext_urls.url('accounts:profile:confirm-email', uuid=task.uuid)), texts=['pgf-change-credentials-already-processed'])

    def test_confirm_email__wrong_timeout(self):
        self.request_login(self.account.email)
        self.client.post(django_reverse('accounts:profile:update'), {'email': 'test_user@test.ru', 'nick': 'test_nick'})
        task = prototypes.ChangeCredentialsTaskPrototype._db_get_object(0)
        task._model.state = relations.CHANGE_CREDENTIALS_TASK_STATE.TIMEOUT
        task._model.save()
        # FIX: texts= moved out of client.get and into check_html_ok (see above)
        self.check_html_ok(self.client.get(dext_urls.url('accounts:profile:confirm-email', uuid=task.uuid)), texts=['pgf-change-credentials-timeout'])

    def test_confirm_email__error_occured(self):
        self.request_login(self.account.email)
        self.client.post(django_reverse('accounts:profile:update'), {'email': 'test_user@test.ru', 'nick': 'test_nick'})
        task = prototypes.ChangeCredentialsTaskPrototype._db_get_object(0)
        task._model.state = relations.CHANGE_CREDENTIALS_TASK_STATE.ERROR
        task._model.save()
        # FIX: texts= moved out of client.get and into check_html_ok (see above)
        self.check_html_ok(self.client.get(dext_urls.url('accounts:profile:confirm-email', uuid=task.uuid)), texts=['pgf-change-credentials-error'])

    # --- misc profile endpoints ------------------------------------------

    def test_update_last_news_reminder_time_unlogined(self):
        self.check_ajax_error(self.client.post(django_reverse('accounts:profile:update-last-news-reminder-time')), 'common.login_required')

    def test_update_last_news_reminder_time(self):
        self.request_login(self.account.email)
        self.check_ajax_ok(self.client.post(django_reverse('accounts:profile:update-last-news-reminder-time')))
        self.assertTrue(self.account.last_news_remind_time < prototypes.AccountPrototype.get_by_id(self.account.id).last_news_remind_time)

    def test_profile_update_settings__personal_messages(self):
        self.request_login(self.account.email)
        self.assertTrue(self.account.personal_messages_subscription)
        response = self.client.post(django_reverse('accounts:profile:update-settings'), {'personal_messages_subscription': False, 'gender': game_relations.GENDER.FEMALE})
        self.assertFalse(prototypes.AccountPrototype.get_by_id(self.account.id).personal_messages_subscription)
        self.check_ajax_ok(response, data={'next_url': django_reverse('accounts:profile:edited')})

    def test_profile_update_settings__news(self):  # renamed from misspelled "__bews"
        self.request_login(self.account.email)
        self.assertTrue(self.account.news_subscription)
        response = self.client.post(django_reverse('accounts:profile:update-settings'), {'news_subscription': False, 'gender': game_relations.GENDER.FEMALE})
        self.assertFalse(prototypes.AccountPrototype.get_by_id(self.account.id).news_subscription)
        self.check_ajax_ok(response, data={'next_url': django_reverse('accounts:profile:edited')})

    def test_profile_update_settings__description(self):
        self.request_login(self.account.email)
        self.assertEqual(self.account.description, '')
        response = self.client.post(django_reverse('accounts:profile:update-settings'), {'description': 'new-description', 'gender': game_relations.GENDER.FEMALE})
        self.assertEqual(prototypes.AccountPrototype.get_by_id(self.account.id).description, 'new-description')
        self.check_ajax_ok(response, data={'next_url': django_reverse('accounts:profile:edited')})

    def test_profile_update_settings__gender(self):
        self.request_login(self.account.email)
        self.assertTrue(self.account.gender.is_MALE)
        response = self.client.post(django_reverse('accounts:profile:update-settings'), {'gender': game_relations.GENDER.FEMALE})
        self.assertTrue(prototypes.AccountPrototype.get_by_id(self.account.id).gender.is_FEMALE)
        self.check_ajax_ok(response, data={'next_url': django_reverse('accounts:profile:edited')})
| 60.238971 | 170 | 0.733354 | 2,005 | 16,385 | 5.737656 | 0.077805 | 0.061196 | 0.087622 | 0.111961 | 0.913682 | 0.874913 | 0.845619 | 0.834492 | 0.787118 | 0.76782 | 0 | 0.009657 | 0.134208 | 16,385 | 271 | 171 | 60.461255 | 0.801283 | 0 | 0 | 0.504902 | 0 | 0 | 0.18158 | 0.130493 | 0 | 0 | 0 | 0 | 0.210784 | 1 | 0.156863 | false | 0.068627 | 0.009804 | 0 | 0.171569 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
78b33fa533f8623d46d06914a65f1be4a3784250 | 13,727 | py | Python | core/testing/test_distributions.py | Ardavans/sHDP | 4fc18a86668b1c1eebd416857184d60079db2ed6 | [
"MIT"
] | 74 | 2016-06-17T04:57:17.000Z | 2022-03-29T13:18:58.000Z | core/testing/test_distributions.py | Ardavans/sHDP | 4fc18a86668b1c1eebd416857184d60079db2ed6 | [
"MIT"
] | 9 | 2017-01-09T08:18:24.000Z | 2021-03-31T20:23:58.000Z | core/testing/test_distributions.py | Ardavans/sHDP | 4fc18a86668b1c1eebd416857184d60079db2ed6 | [
"MIT"
] | 25 | 2016-04-14T20:54:48.000Z | 2021-09-17T21:12:39.000Z | from __future__ import division
import numpy as np
from nose.plugins.attrib import attr
from .. import distributions as distributions
from mixins import BigDataGibbsTester, GewekeGibbsTester, BasicTester, mkdir
@attr('geometric')
class TestGeometric(BigDataGibbsTester, GewekeGibbsTester):
    """Gibbs-sampling tests for the Geometric distribution."""

    @property
    def distribution_class(self):
        return distributions.Geometric

    @property
    def hyperparameter_settings(self):
        # two Beta priors: one strongly favoring small p, one symmetric
        return (
            {'alpha_0': 2, 'beta_0': 20},
            {'alpha_0': 5, 'beta_0': 5},
        )

    def params_close(self, d1, d2):
        # two fits agree when their success probabilities match to within 5%
        return np.allclose(d1.p, d2.p, rtol=0.05)

    def geweke_statistics(self, d, data):
        # monitor the success probability
        return d.p

    @property
    def geweke_pval(self):
        return 0.5
@attr('poisson')
class TestPoisson(BigDataGibbsTester, GewekeGibbsTester):
    """Gibbs-sampling tests for the Poisson distribution."""

    @property
    def distribution_class(self):
        return distributions.Poisson

    @property
    def hyperparameter_settings(self):
        # a single Gamma(30, 3) prior over the rate
        return ({'alpha_0': 30, 'beta_0': 3},)

    def params_close(self, d1, d2):
        # two fits agree when their rates match to within 5%
        return np.allclose(d1.lmbda, d2.lmbda, rtol=0.05)

    def geweke_statistics(self, d, data):
        # monitor the rate parameter
        return d.lmbda
@attr('negbinfixedr')
class TestNegativeBinomialFixedR(BigDataGibbsTester, GewekeGibbsTester):
    """Gibbs-sampling tests for the negative binomial with r held fixed."""

    @property
    def distribution_class(self):
        return distributions.NegativeBinomialFixedR

    @property
    def hyperparameter_settings(self):
        # fixed r = 5 with a Beta(1, 9) prior over p
        return ({'r': 5, 'alpha_0': 1, 'beta_0': 9},)

    def params_close(self, d1, d2):
        # only p is inferred here, so compare it directly (10% tolerance)
        return np.allclose(d1.p, d2.p, rtol=0.1)

    def geweke_statistics(self, d, data):
        # monitor the success probability
        return d.p
@attr('negbinintr')
class TestNegativeBinomialIntegerR(BigDataGibbsTester, GewekeGibbsTester):
    """Gibbs-sampling tests for the negative binomial with integer r inferred."""

    @property
    def distribution_class(self):
        return distributions.NegativeBinomialIntegerR

    @property
    def hyperparameter_settings(self):
        # discrete prior over r with mass only on the top half of the support,
        # plus a symmetric Beta(5, 5) prior over p
        return (dict(r_discrete_distn=np.r_[0., 0, 0, 1, 1, 1], alpha_0=5, beta_0=5),)

    def params_close(self, d1, d2):
        # Being off by one in r can still describe nearly the same
        # distribution, so compare moment parameters rather than (r, p).
        mean1 = d1.r * d1.p / (1. - d1.p)
        mean2 = d2.r * d2.p / (1. - d2.p)
        var1 = mean1 / (1. - d1.p)
        var2 = mean2 / (1. - d2.p)
        return np.allclose(mean1, mean2, rtol=0.1) and np.allclose(var1, var2, rtol=0.1)

    def geweke_statistics(self, d, data):
        # monitor the success probability
        return d.p

    @property
    def geweke_pval(self):
        # the statistic lives on (0, 1) and is very sensitive, hence the low threshold
        return 0.005
@attr('negbinintr2')
class TestNegativeBinomialIntegerR2(BigDataGibbsTester, GewekeGibbsTester):
    """Gibbs-sampling tests for the NegativeBinomialIntegerR2 implementation."""

    @property
    def distribution_class(self):
        return distributions.NegativeBinomialIntegerR2

    @property
    def hyperparameter_settings(self):
        # discrete prior over r with mass only on the top half of the support,
        # plus a symmetric Beta(5, 5) prior over p
        return (dict(r_discrete_distn=np.r_[0., 0, 0, 1, 1, 1], alpha_0=5, beta_0=5),)

    def params_close(self, d1, d2):
        # Being off by one in r can still describe nearly the same
        # distribution, so compare moment parameters rather than (r, p).
        mean1 = d1.r * d1.p / (1. - d1.p)
        mean2 = d2.r * d2.p / (1. - d2.p)
        var1 = mean1 / (1. - d1.p)
        var2 = mean2 / (1. - d2.p)
        return np.allclose(mean1, mean2, rtol=0.1) and np.allclose(var1, var2, rtol=0.1)

    def geweke_statistics(self, d, data):
        # monitor the success probability
        return d.p

    @property
    def geweke_pval(self):
        # the statistic lives on (0, 1) and is very sensitive, hence the low threshold
        return 0.005
@attr('negbinintrvariant')
class TestNegativeBinomialIntegerRVariant(TestNegativeBinomialIntegerR):
@property
def distribution_class(self):
return distributions.NegativeBinomialIntegerRVariant
@attr('categorical')
class TestCategorical(BigDataGibbsTester,GewekeGibbsTester):
@property
def distribution_class(self):
return distributions.Categorical
@property
def hyperparameter_settings(self):
return (dict(alpha_0=5.,K=5),)
@property
def big_data_size(self):
return 20000
def params_close(self,d1,d2):
return np.allclose(d1.weights,d2.weights,atol=0.05)
def geweke_statistics(self,d,data):
return d.weights
@property
def geweke_pval(self):
return 0.05
@attr('gaussian')
class TestGaussian(BigDataGibbsTester,GewekeGibbsTester):
@property
def distribution_class(self):
return distributions.Gaussian
@property
def hyperparameter_settings(self):
return (dict(mu_0=np.zeros(2),sigma_0=np.eye(2),kappa_0=1.,nu_0=4.),)
def params_close(self,d1,d2):
return np.linalg.norm(d1.mu-d2.mu) < 0.1 and np.linalg.norm(d1.sigma-d2.sigma) < 0.1
def geweke_statistics(self,d,data):
return np.concatenate((d.mu,np.diag(d.sigma)))
@property
def geweke_nsamples(self):
return 30000
@property
def geweke_data_size(self):
return 1
@property
def geweke_pval(self):
return 0.05
def geweke_numerical_slice(self,d,setting_idx):
return slice(0,d.mu.shape[0])
@attr('diagonalgaussian')
class TestDiagonalGaussian(BigDataGibbsTester,GewekeGibbsTester,BasicTester):
@property
def distribution_class(self):
return distributions.DiagonalGaussian
@property
def hyperparameter_settings(self):
return (dict(mu_0=np.zeros(2),nus_0=7,alphas_0=np.r_[5.,10.],betas_0=np.r_[1.,4.]),)
def params_close(self,d1,d2):
return np.linalg.norm(d1.mu-d2.mu) < 0.1 and np.linalg.norm(d1.sigmas-d2.sigmas) < 0.25
def geweke_statistics(self,d,data):
return np.concatenate((d.mu,d.sigmas))
@property
def geweke_nsamples(self):
return 50000
@property
def geweke_data_size(self):
return 2
@property
def geweke_pval(self):
return 0.05
def geweke_numerical_slice(self,d,setting_idx):
return slice(0,d.mu.shape[0])
### class-specific
def test_log_likelihood(self):
data = np.random.randn(1000,100)
mu = np.random.randn(100)
sigmas = np.random.uniform(1,2,size=100)
d = distributions.DiagonalGaussian(mu=mu,sigmas=sigmas)
pdf1 = d.log_likelihood(data)
import scipy.stats as stats
pdf2 = stats.norm.logpdf(data,loc=mu,scale=np.sqrt(sigmas)).sum(1)
assert np.allclose(pdf1,pdf2)
def test_log_likelihood2(self):
data = np.random.randn(1000,600)
mu = np.random.randn(600)
sigmas = np.random.uniform(1,2,size=600)
d = distributions.DiagonalGaussian(mu=mu,sigmas=sigmas)
pdf1 = d.log_likelihood(data)
import scipy.stats as stats
pdf2 = stats.norm.logpdf(data,loc=mu,scale=np.sqrt(sigmas)).sum(1)
assert np.allclose(pdf1,pdf2)
@attr('diagonalgaussiannonconj')
class TestDiagonalGaussianNonconjNIG(BigDataGibbsTester,GewekeGibbsTester,BasicTester):
@property
def distribution_class(self):
return distributions.DiagonalGaussianNonconjNIG
@property
def hyperparameter_settings(self):
return (
dict(mu_0=np.zeros(2),sigmas_0=np.ones(2),alpha_0=np.ones(2),beta_0=np.ones(2)),
dict(mu_0=np.zeros(600),sigmas_0=np.ones(600),alpha_0=np.ones(600),beta_0=np.ones(600)),
)
def params_close(self,d1,d2):
return np.linalg.norm(d1.mu - d2.mu) < 0.1*np.sqrt(d1.mu.shape[0]) \
and np.linalg.norm(d1.sigmas-d2.sigmas) < 0.25*d1.sigmas.shape[0]
def geweke_statistics(self,d,data):
return np.concatenate((d.mu,d.sigmas))
@property
def geweke_nsamples(self):
return 5000
@property
def geweke_data_size(self):
return 2
@property
def geweke_pval(self):
return 0.05
def geweke_numerical_slice(self,d,setting_idx):
return slice(0,d.mu.shape[0])
### class-specific
def test_log_likelihood(self):
data = np.random.randn(1000,100)
mu = np.random.randn(100)
sigmas = np.random.uniform(1,2,size=100)
d = distributions.DiagonalGaussian(mu=mu,sigmas=sigmas)
pdf1 = d.log_likelihood(data)
import scipy.stats as stats
pdf2 = stats.norm.logpdf(data,loc=mu,scale=np.sqrt(sigmas)).sum(1)
assert np.allclose(pdf1,pdf2)
def test_log_likelihood2(self):
data = np.random.randn(1000,600)
mu = np.random.randn(600)
sigmas = np.random.uniform(1,2,size=600)
d = distributions.DiagonalGaussian(mu=mu,sigmas=sigmas)
pdf1 = d.log_likelihood(data)
import scipy.stats as stats
pdf2 = stats.norm.logpdf(data,loc=mu,scale=np.sqrt(sigmas)).sum(1)
assert np.allclose(pdf1,pdf2)
@attr('gaussianfixedmean')
class TestGaussianFixedMean(BigDataGibbsTester,GewekeGibbsTester):
@property
def distribution_class(self):
return distributions.GaussianFixedMean
@property
def hyperparameter_settings(self):
return (dict(mu=np.array([1.,2.,3.]),nu_0=5,lmbda_0=np.diag([3.,2.,1.])),)
def params_close(self,d1,d2):
return np.linalg.norm(d1.sigma - d2.sigma) < 0.25
def geweke_statistics(self,d,data):
return np.diag(d.sigma)
@property
def geweke_nsamples(self):
return 25000
@property
def geweke_data_size(self):
return 5
@property
def geweke_pval(self):
return 0.05
@attr('gaussianfixedcov')
class TestGaussianFixedCov(BigDataGibbsTester,GewekeGibbsTester):
@property
def distribution_class(self):
return distributions.GaussianFixedCov
@property
def hyperparameter_settings(self):
return (dict(sigma=np.diag([3.,2.,1.]),mu_0=np.array([1.,2.,3.]),lmbda_0=np.eye(3)),)
def params_close(self,d1,d2):
return np.linalg.norm(d1.mu-d2.mu) < 0.1
def geweke_statistics(self,d,data):
return d.mu
@property
def geweke_nsamples(self):
return 25000
@property
def geweke_data_size(self):
return 5
@property
def geweke_pval(self):
return 0.05
@attr('gaussiannonconj')
class TestGaussianNonConj(BigDataGibbsTester,GewekeGibbsTester):
@property
def distribution_class(self):
return distributions.GaussianNonConj
@property
def hyperparameter_settings(self):
return (dict(mu_0=np.zeros(2),mu_lmbda_0=2*np.eye(2),nu_0=5,sigma_lmbda_0=np.eye(2)),)
def params_close(self,d1,d2):
return np.linalg.norm(d1.mu-d2.mu) < 0.1 and np.linalg.norm(d1.sigma-d2.sigma) < 0.25
def geweke_statistics(self,d,data):
return np.concatenate((d.mu,np.diag(d.sigma)))
@property
def geweke_nsamples(self):
return 30000
@property
def geweke_data_size(self):
return 1
@property
def geweke_pval(self):
return 0.05
def geweke_numerical_slice(self,d,setting_idx):
return slice(0,d.mu.shape[0])
@property
def resample_kwargs(self):
return dict(niter=10)
@attr('scalargaussiannix')
class TestScalarGaussianNIX(BigDataGibbsTester,GewekeGibbsTester):
@property
def distribution_class(self):
return distributions.ScalarGaussianNIX
@property
def hyperparameter_settings(self):
return (dict(mu_0=2.7,kappa_0=2.,sigmasq_0=4.,nu_0=2),)
def params_close(self,d1,d2):
return np.abs(d1.mu-d2.mu) < 0.1 and np.abs(d2.sigmasq - d2.sigmasq) < 0.25
def geweke_statistics(self,d,data):
return np.array((d.mu,d.sigmasq))
@property
def geweke_nsamples(self):
return 30000
@property
def geweke_data_size(self):
return 2
@property
def geweke_pval(self):
return 0.05
def geweke_numerical_slice(self,d,setting_idx):
return slice(0,1)
@attr('scalargaussiannonconjnix')
class TestScalarGaussianNonconjNIX(BigDataGibbsTester,GewekeGibbsTester):
@property
def distribution_class(self):
return distributions.ScalarGaussianNonconjNIX
@property
def hyperparameter_settings(self):
return (dict(mu_0=2.7,tausq_0=4.,sigmasq_0=2.,nu_0=2),)
def params_close(self,d1,d2):
return np.abs(d1.mu-d2.mu) < 0.1 and np.abs(d2.sigmasq - d2.sigmasq) < 0.25
def geweke_statistics(self,d,data):
return np.array((d.mu,d.sigmasq))
@property
def geweke_nsamples(self):
return 30000
@property
def geweke_data_size(self):
return 2
@property
def geweke_pval(self):
return 0.05
def geweke_numerical_slice(self,d,setting_idx):
return slice(0,1)
@attr('CRP')
class TestCRP(BigDataGibbsTester):
@property
def distribution_class(self):
return distributions.CRP
@property
def hyperparameter_settings(self):
return (dict(a_0=1.,b_0=1./10),)
@property
def big_data_size(self):
return [50]*200
def params_close(self,d1,d2):
return np.abs(d1.concentration - d2.concentration) < 1.0
@attr('GammaCompoundDirichlet')
class TestDirichletCompoundGamma(object):
def test_weaklimit(self):
a = distributions.CRP(10,1)
b = distributions.GammaCompoundDirichlet(1000,10,1)
a.concentration = b.concentration = 10.
from matplotlib import pyplot as plt
plt.figure()
crp_counts = np.zeros(10)
gcd_counts = np.zeros(10)
for itr in range(500):
crp_rvs = np.sort(a.rvs(25))[::-1][:10]
crp_counts[:len(crp_rvs)] += crp_rvs
gcd_counts += np.sort(b.rvs(25))[::-1][:10]
plt.plot(crp_counts/200,gcd_counts/200,'bx-')
plt.xlim(0,10)
plt.ylim(0,10)
import os
figpath = os.path.join(os.path.dirname(__file__),'figures',
self.__class__.__name__,'weaklimittest.pdf')
mkdir(os.path.dirname(figpath))
plt.savefig(figpath)
| 27.509018 | 104 | 0.661397 | 1,861 | 13,727 | 4.763568 | 0.118216 | 0.076932 | 0.053694 | 0.050536 | 0.761985 | 0.75612 | 0.75612 | 0.726791 | 0.715849 | 0.576763 | 0 | 0.048041 | 0.219057 | 13,727 | 498 | 105 | 27.564257 | 0.778918 | 0.029067 | 0 | 0.68595 | 0 | 0 | 0.019904 | 0.005183 | 0 | 0 | 0 | 0 | 0.011019 | 1 | 0.292011 | false | 0 | 0.030303 | 0.272727 | 0.647383 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
78d4ae695118aa145ae32d8a5cc7e1ff6e469e26 | 21,940 | py | Python | iperf/lib/common.py | philip-shen/note_python | db0ad84af25464a22ac52e348960107c81e74a56 | [
"MIT"
] | null | null | null | iperf/lib/common.py | philip-shen/note_python | db0ad84af25464a22ac52e348960107c81e74a56 | [
"MIT"
] | 11 | 2021-02-08T20:45:23.000Z | 2022-03-12T01:00:11.000Z | iperf/lib/common.py | philip-shen/note_python | db0ad84af25464a22ac52e348960107c81e74a56 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import os,time,sys
import sys
import iperf3
import socket
import struct
import threading
from logger import logger
def server_siteone(parms):
server = iperf3.Server()
server.port = parms['port']
server.protocol = parms['protocol']
#print('Running server: {0}:{1}'.format(server.bind_address, server.port))
logger.info('Running server: {0}:{1}:{2}'.format(server.bind_address, server.port, server.protocol))
while True:
try:
result = server.run()
if result.error:
#print(result.error)
logger.info(result.error)
else:
print('')
logger.info('Test results from {0}:{1} to {2}'.format(result.remote_host,
result.remote_port,result.local_port))
logger.info(' started at {0}'.format(result.time))
logger.info(' bytes received {0}'.format(result.received_bytes))
logger.info('Average transmitted received in all sorts of networky formats:')
logger.info(' bits per second (bps) {0}'.format(result.received_bps))
logger.info(' Kilobits per second (kbps) {0}'.format(result.received_kbps))
logger.info(' Megabits per second (Mbps) {0}'.format(result.received_Mbps))
logger.info(' KiloBytes per second (kB/s) {0}'.format(result.received_kB_s))
logger.info(' MegaBytes per second (MB/s) {0}'.format(result.received_MB_s))
except KeyboardInterrupt:
pass
def client_siteone(parms):
client = iperf3.Client()
client.duration = 1
client.server_hostname = parms['remote_server_ip']
client.port = parms['port']
client.protocol = parms['protocol']
client.duration = int(parms['test_duration'])
logger.info('Connecting to {0}:{1}:{2}'.format(client.server_hostname, client.port, client.protocol))
result = client.run()
if result.error:
logger.info(result.error)
else:
print('')
logger.info('Test completed:')
logger.info(' started at {0}'.format(result.time))
logger.info(' bytes transmitted {0}'.format(result.sent_bytes))
logger.info(' retransmits {0}'.format(result.retransmits))
logger.info(' avg cpu load {0}%\n'.format(result.local_cpu_total))
logger.info('Average transmitted data in all sorts of networky formats:')
logger.info(' bits per second (bps) {0}'.format(result.sent_bps))
logger.info(' Kilobits per second (kbps) {0}'.format(result.sent_kbps))
logger.info(' Megabits per second (Mbps) {0}'.format(result.sent_Mbps))
logger.info(' KiloBytes per second (kB/s) {0}'.format(result.sent_kB_s))
logger.info(' MegaBytes per second (MB/s) {0}'.format(result.sent_MB_s))
def client_siteone_udp(parms):
client = iperf3.Client()
client.duration = 1
client.server_hostname = parms['remote_server_ip']
client.port = parms['port']
client.protocol = parms['protocol']
logger.info('Connecting to {0}:{1}:{2}'.format(client.server_hostname, client.port, client.protocol))
result = client.run()
if result.error:
logger.info(result.error)
else:
print('')
logger.info('Test completed:')
logger.info(' started at {0}'.format(result.time))
logger.info(' bytes transmitted {0}'.format(result.bytes))
logger.info(' jitter (ms) {0}'.format(result.jitter_ms))
logger.info(' avg cpu load {0}%\n'.format(result.local_cpu_total))
logger.info('Average transmitted data in all sorts of networky formats:')
logger.info(' bits per second (bps) {0}'.format(result.bps))
logger.info(' Kilobits per second (kbps) {0}'.format(result.kbps))
logger.info(' Megabits per second (Mbps) {0}'.format(result.Mbps))
logger.info(' KiloBytes per second (kB/s) {0}'.format(result.kB_s))
logger.info(' MegaBytes per second (MB/s) {0}'.format(result.MB_s))
#https://subscription.packtpub.com/book/networking_and_servers/9781786463999/1/ch01lvl1sec24/writing-a-simple-udp-echo-client-server-application
def server_siteone_socket_udp(parms):
""" A simple echo server """
server_host = parms['host']
server_port = parms['port']
server_protocol = parms['protocol']
data_payload = 2048
# Create a UDP socket
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
# Bind the socket to the port(type int)
server_address = (server_host, int(server_port))
logger.info("Starting up echo server on %s port %s" % server_address)
sock.bind(server_address)
while True:
try:
#print (" ")
logger.info("Waiting to receive message from client")
data, address = sock.recvfrom(data_payload)
logger.info("received %s bytes from %s to port: %s" % (len(data), address, server_port))
logger.info("Data: %s" %data)
if data:
sent = sock.sendto(data, address)
logger.info("sent %s bytes back to %s" % (sent, address))
except KeyboardInterrupt:
pass
def client_siteone_socket_udp(parms):
client_server_hostname = parms['remote_server_ip']
client_port = parms['port']
client_protocol = parms['protocol']
data_payload = 2048
""" A simple echo client """
# Create a UDP socket
sock = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
server_address = (client_server_hostname, int(client_port))
logger.info("Connecting to %s port %s" % server_address)
try:
# Send data
message = "Test message. This will be echoed"
logger.info("Sending %s" % message)
sent = sock.sendto(message.encode('utf-8'), server_address)
# Receive response
data, server = sock.recvfrom(data_payload)
logger.info("received %s" % data)
finally:
logger.info("Closing connection to the server")
sock.close()
# https://gist.github.com/Lothiraldan/3951784
ANY = "0.0.0.0"
def server_siteone_socket_udp_mutlicast(parms):
# Socket part
server_host = parms['host']
server_port = parms['port']
server_protocol = parms['protocol']
#MCAST_ADDR = "237.252.249.227"
#MCAST_PORT = 1600
#create a UDP socket
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
#allow multiple sockets to use the same PORT number
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
# Bind the socket to the port(type int)
server_address = (server_host, int(server_port))
logger.info("Starting up Multicast server on %s port %s" % server_address)
#Bind to the port that we know will receive multicast data
#sock.bind((ANY, int(server_port)))
sock.bind(server_address)
#tell the kernel that we are a multicast socket
sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 255)
#Tell the kernel that we want to add ourselves to a multicast group
#The address for the multicast group is the third param
sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP,
socket.inet_aton(server_host) + socket.inet_aton(ANY))
while True:
try:
logger.info("Waiting to receive message from client")
data, addr = sock.recvfrom(1024)
logger.info("received %s bytes from %s to port: %s" % (len(data), addr, server_port))
#https://stackoverflow.com/questions/33003498/typeerror-a-bytes-like-object-is-required-not-str
logger.info ("Data: {}, addr: {}".format(data.decode(), addr))
except socket.error:
pass
except KeyboardInterrupt:
pass
def client_siteone_socket_udp_mutlicast(parms):
# Socket part
client_server_hostname = parms['remote_server_ip']
client_port = parms['port']
client_protocol = parms['protocol']
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM,
socket.IPPROTO_UDP)
sock.bind((ANY, 0))
sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 255)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server_address = (client_server_hostname, int(client_port))
logger.info("Connecting to %s port %s" % server_address)
message = "Test message. It will be Mutlicast"
logger.info("Sending %s" % message)
#https://stackoverflow.com/questions/33003498/typeerror-a-bytes-like-object-is-required-not-str
sock.sendto(message.encode(), (client_server_hostname, int(client_port)) )
# https://gist.github.com/tuxmartin/e64d2132061ffef7e031
def server_siteone_socket_udp_ipv6(parms):
""" A simple echo server """
server_host = parms['host']
server_port = parms['port']
server_protocol = parms['protocol']
data_payload = 2048
# Create a UDP socket
sock = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
# Bind the socket to the port(type int)
server_address = (server_host, int(server_port))
logger.info("Starting up UDP echo server on %s port %s" % server_address)
sock.bind(server_address)
while True:
try:
#print (" ")
logger.info("Waiting to receive message from client")
data, address = sock.recvfrom(data_payload)
logger.info("received %s bytes from %s to port: %s" % (len(data), address, server_port))
logger.info("Data: %s" %data)
if data:
sent = sock.sendto(data, address)
logger.info("sent %s bytes back to %s" % (sent, address))
except KeyboardInterrupt:
pass
def client_siteone_socket_udp_ipv6(parms):
client_server_hostname = parms['remote_server_ip']
client_port = parms['port']
client_protocol = parms['protocol']
data_payload = 2048
""" A simple echo client """
# Create a UDP socket
sock = socket.socket(socket.AF_INET6,socket.SOCK_DGRAM)
server_address = (client_server_hostname, int(client_port))
logger.info("Connecting to %s port %s" % server_address)
try:
# Send data
message = "Test message. This will be echoed"
logger.info("Sending %s" % message)
sent = sock.sendto(message.encode('utf-8'), server_address)
# Receive response
data, server = sock.recvfrom(data_payload)
logger.info("received %s" % data)
finally:
logger.info("Closing connection to the server")
sock.close()
def server_siteone_socket_udp_mutlicast_ipv6(parms):
# Socket part
server_host = parms['host']
server_port = parms['port']
server_protocol = parms['protocol']
# Look up multicast group address in name server and find out IP version
addrinfo = socket.getaddrinfo(server_host, None)[0]
# Create a socket
#sock = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
sock = socket.socket(addrinfo[0], socket.SOCK_DGRAM, socket.IPPROTO_UDP)
#allow multiple sockets to use the same PORT number
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
# Bind the socket to the port(type int)
server_address = (server_host, int(server_port))
logger.info("Starting up Multicast server on %s port %s" % server_address)
#Bind to the port that we know will receive multicast data
sock.bind(server_address)
#tell the kernel that we are a multicast socket
#sock.setsockopt(socket.IPPROTO_IPV6, socket.IP_MULTICAST_TTL, 255)
group_bin = socket.inet_pton(addrinfo[0], addrinfo[4][0])
#Tell the kernel that we want to add ourselves to a multicast group
#The address for the multicast group is the third param
mreq = group_bin + struct.pack('@I', 0)
sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, mreq)
while True:
try:
logger.info("Waiting to receive message from client")
data, addr = sock.recvfrom(1024)
logger.info("received %s bytes from %s to port: %s" % (len(data), addr, server_port))
#https://stackoverflow.com/questions/33003498/typeerror-a-bytes-like-object-is-required-not-str
logger.info ("Data: {}, addr: {}".format(data.decode(), addr))
except socket.error:
pass
except KeyboardInterrupt:
pass
MYTTL = 1 # Increase to reach other networks
def client_siteone_socket_udp_mutlicast_ipv6(parms):
# Socket part
client_server_hostname = parms['remote_server_ip']
client_port = parms['port']
client_protocol = parms['protocol']
# Look up multicast group address in name server and find out IP version
addrinfo = socket.getaddrinfo(client_server_hostname, None)[0]
sock = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM,socket.IPPROTO_UDP)
#sock.bind((ANY, 0))
# Set Time-to-live (optional)
ttl_bin = struct.pack('@i', MYTTL)
sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_HOPS, ttl_bin)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server_address = (client_server_hostname, int(client_port))
logger.info("Connecting to %s port %s" % server_address)
message = "Test message. It will be Mutlicast"
logger.info("Sending %s" % message)
#https://stackoverflow.com/questions/33003498/typeerror-a-bytes-like-object-is-required-not-str
sock.sendto(message.encode(), (client_server_hostname, int(client_port)) )
def func():
pass
def continuous_clock_timer(duration_sec):
#print('Timer start,press"Enter" button to calcuate interval,Press "Ctrl+C" to escape timer.')
#in_value = input()
#if in_value == 'exit':
# return
print('Start Time Duratin:{} sec(s)!!!'.format(duration_sec))
start_time = time.time()
last_time = start_time
total_time = round(time.time() - start_time, 2)
lap_num = 1
try:
while True:
if total_time > duration_sec:
print('Duratin:{}sec(s) End Timer!!!'.format(duration_sec))
break
lap_time = round(time.time() - last_time, 2)
total_time = round(time.time() - start_time, 2)
print("{}: {} {}".format(lap_num, total_time, lap_time))
timer = threading.Timer(0,func)
timer.start()
time.sleep(2) ## 等待2s
timer.cancel()##停止定時器
#print("5s到了定時器退出")
lap_num += 1
last_time = time.time()
except KeyboardInterrupt:
print('\nEnd Timer!!!')
exit()
def client_siteone_continuous(parms):
client = iperf3.Client()
client.duration = 1
client.server_hostname = parms['remote_server_ip']
client.port = parms['port']
client.protocol = parms['protocol']
test_duration = parms['test_duration']
#print('Start Time Duratin:{} sec(s)!!!'.format(test_duration))
logger.info('Start Time Duratin:{} sec(s)!!!'.format(test_duration))
start_time = time.time()
last_time = start_time
total_time = round(time.time() - start_time, 2)
lap_num = 1
try:
while True:
if total_time > int(test_duration):
#print('Duratin:{}sec(s) End Timer!!!'.format(test_duration))
logger.info('Duratin:{}sec(s) End Timer!!!'.format(test_duration))
break
lap_time = round(time.time() - last_time, 2)
total_time = round(time.time() - start_time, 2)
# Start iperf client traffic
logger.info('Connecting to {0}:{1}:{2}'.format(client.server_hostname, client.port, client.protocol))
result = client.run()
if result.error:
logger.info(result.error)
else:
print('')
logger.info('Test completed:')
logger.info(' started at {0}'.format(result.time))
logger.info(' bytes transmitted {0}'.format(result.sent_bytes))
logger.info(' retransmits {0}'.format(result.retransmits))
logger.info(' avg cpu load {0}%\n'.format(result.local_cpu_total))
logger.info('Average transmitted data in all sorts of networky formats:')
logger.info(' bits per second (bps) {0}'.format(result.sent_bps))
logger.info(' Kilobits per second (kbps) {0}'.format(result.sent_kbps))
logger.info(' Megabits per second (Mbps) {0}'.format(result.sent_Mbps))
logger.info(' KiloBytes per second (kB/s) {0}'.format(result.sent_kB_s))
logger.info(' MegaBytes per second (MB/s) {0}'.format(result.sent_MB_s))
# End iperf client traffic
timer = threading.Timer(0,func)
timer.start()
time.sleep(3) ## 等待2s
timer.cancel()##停止定時器
lap_num += 1
last_time = time.time()
except KeyboardInterrupt:
#print('\nEnd Timer!!!')
logger.info('\nEnd Timer!!!')
exit()
def client_siteone_socket_udp_continuous(parms):
client_server_hostname = parms['remote_server_ip']
client_port = parms['port']
client_protocol = parms['protocol']
data_payload = 2048
test_duration = parms['test_duration']
#print('Start Time Duratin:{} sec(s)!!!'.format(test_duration))
logger.info('Start Time Duratin:{} sec(s)!!!'.format(test_duration))
start_time = time.time()
last_time = start_time
total_time = round(time.time() - start_time, 2)
lap_num = 1
try:
while True:
if total_time > int(test_duration):
#print('Duratin:{}sec(s) End Timer!!!'.format(test_duration))
logger.info('Duratin:{}sec(s) End Timer!!!'.format(test_duration))
break
lap_time = round(time.time() - last_time, 2)
total_time = round(time.time() - start_time, 2)
#print("{}: {} {}".format(lap_num, total_time, lap_time))
# Start client traffic
""" A simple echo client """
# Create a UDP socket
sock = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
server_address = (client_server_hostname, int(client_port))
logger.info("Connecting to %s port %s" % server_address)
try:
# Send data
message = "Test message. This will be echoed"
logger.info("Sending %s" % message)
sent = sock.sendto(message.encode('utf-8'), server_address)
# Receive response
data, server = sock.recvfrom(data_payload)
logger.info("received %s" % data)
finally:
logger.info("Closing connection to the server")
sock.close()
# End client traffic
timer = threading.Timer(0,func)
timer.start()
time.sleep(3) ## 等待2s
timer.cancel()##停止定時器
lap_num += 1
last_time = time.time()
except KeyboardInterrupt:
#print('\nEnd Timer!!!')
logger.info('\nEnd Timer!!!')
exit()
def client_siteone_socket_udp_ipv6_continuous(parms):
client_server_hostname = parms['remote_server_ip']
client_port = parms['port']
client_protocol = parms['protocol']
data_payload = 2048
test_duration = parms['test_duration']
logger.info('Start Time Duratin:{} sec(s)!!!'.format(test_duration))
start_time = time.time()
last_time = start_time
total_time = round(time.time() - start_time, 2)
lap_num = 1
try:
while True:
if total_time > int(test_duration):
#print('Duratin:{}sec(s) End Timer!!!'.format(test_duration))
logger.info('Duratin:{}sec(s) End Timer!!!'.format(test_duration))
break
lap_time = round(time.time() - last_time, 2)
total_time = round(time.time() - start_time, 2)
#print("{}: {} {}".format(lap_num, total_time, lap_time))
# Start client traffic
""" A simple echo client """
# Create a UDP socket
sock = socket.socket(socket.AF_INET6,socket.SOCK_DGRAM)
server_address = (client_server_hostname, int(client_port))
logger.info("Connecting to %s port %s" % server_address)
try:
# Send data
message = "Test message. This will be echoed"
logger.info("Sending %s" % message)
sent = sock.sendto(message.encode('utf-8'), server_address)
# Receive response
data, server = sock.recvfrom(data_payload)
logger.info("received %s" % data)
finally:
logger.info("Closing connection to the server")
sock.close()
# End client traffic
timer = threading.Timer(0,func)
timer.start()
time.sleep(3) ## 等待2s
timer.cancel()##停止定時器
lap_num += 1
last_time = time.time()
except KeyboardInterrupt:
#print('\nEnd Timer!!!')
logger.info('\nEnd Timer!!!')
exit() | 37.697595 | 144 | 0.60474 | 2,665 | 21,940 | 4.836398 | 0.095685 | 0.075258 | 0.031267 | 0.015827 | 0.898053 | 0.88913 | 0.87563 | 0.859027 | 0.849019 | 0.845682 | 0 | 0.016249 | 0.276299 | 21,940 | 582 | 145 | 37.697595 | 0.795503 | 0.142479 | 0 | 0.813333 | 0 | 0 | 0.177116 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.042667 | false | 0.021333 | 0.018667 | 0 | 0.061333 | 0.021333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
154bb00002d256eaf38fa95c2e2418f3c4830534 | 132 | py | Python | tests/year_2018/test_day5b.py | vanillaSlice/advent-of-code | 3f31be38c598040ec6032bc9b24856005e070c21 | [
"MIT"
] | null | null | null | tests/year_2018/test_day5b.py | vanillaSlice/advent-of-code | 3f31be38c598040ec6032bc9b24856005e070c21 | [
"MIT"
] | null | null | null | tests/year_2018/test_day5b.py | vanillaSlice/advent-of-code | 3f31be38c598040ec6032bc9b24856005e070c21 | [
"MIT"
] | null | null | null | from src.year_2018.day5b import shortest_polymer
def test_shortest_polymer():
assert shortest_polymer('dabAcCaCBAcCcaDA') == 4
| 26.4 | 52 | 0.80303 | 17 | 132 | 5.941176 | 0.764706 | 0.445545 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.051282 | 0.113636 | 132 | 4 | 53 | 33 | 0.811966 | 0 | 0 | 0 | 0 | 0 | 0.121212 | 0 | 0 | 0 | 0 | 0 | 0.333333 | 1 | 0.333333 | true | 0 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
1554615d11b18785c91e5356bbf31b18bf5f6970 | 110 | py | Python | suport/g4/controllers/about.py | andrelrg/course | bb82070f3ddea9630702975277f105c77e8a5446 | [
"MIT"
] | null | null | null | suport/g4/controllers/about.py | andrelrg/course | bb82070f3ddea9630702975277f105c77e8a5446 | [
"MIT"
] | null | null | null | suport/g4/controllers/about.py | andrelrg/course | bb82070f3ddea9630702975277f105c77e8a5446 | [
"MIT"
] | null | null | null | from flask import render_template
class About():
def get():
return render_template('about.html') | 18.333333 | 44 | 0.690909 | 14 | 110 | 5.285714 | 0.785714 | 0.378378 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.209091 | 110 | 6 | 44 | 18.333333 | 0.850575 | 0 | 0 | 0 | 0 | 0 | 0.09009 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | true | 0 | 0.25 | 0.25 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
ec6a7c0fb0f76ea6ab6b1d0a2029af4b87a8e8cc | 7,876 | py | Python | unittests/scales/test_lin.py | xxao/pero | a7f0c84fae0b21fe120204e798bd61cdab3a125d | [
"MIT"
] | 13 | 2019-07-15T17:51:21.000Z | 2022-03-15T06:13:43.000Z | unittests/scales/test_lin.py | xxao/pero | a7f0c84fae0b21fe120204e798bd61cdab3a125d | [
"MIT"
] | 1 | 2021-12-29T00:46:44.000Z | 2022-01-21T16:18:48.000Z | unittests/scales/test_lin.py | xxao/pero | a7f0c84fae0b21fe120204e798bd61cdab3a125d | [
"MIT"
] | 3 | 2020-09-27T14:31:45.000Z | 2022-01-22T14:28:15.000Z | # Created byMartin.cz
# Copyright (c) Martin Strohalm. All rights reserved.
import unittest
import pero
import numpy
class TestCase(unittest.TestCase):
    """Test case for the linear interpolator (pero.LinInterpol).

    Each scenario checks that normalize() maps a value into the unit range
    of a (start, end) interval and that denormalize() inverts it exactly,
    including values outside the interval and at zero.
    """

    def _check_scalar(self, interpol, start, end, pairs):
        """Assert normalize/denormalize round-trips for each (value, norm) pair."""
        for value, norm in pairs:
            self.assertEqual(interpol.normalize(value, start, end), norm)
            self.assertEqual(interpol.denormalize(norm, start, end), value)

    def test_positive(self):
        """Tests whether interpolator works correctly for positive range."""
        # pairs: inside, left of range, right of range, zero
        self._check_scalar(
            pero.LinInterpol(), 2, 4,
            [(3, 0.5), (1, -0.5), (5, 1.5), (0, -1.0)])

    def test_positive_reversed(self):
        """Tests whether interpolator works correctly for positive reversed range."""
        self._check_scalar(
            pero.LinInterpol(), 4, 2,
            [(3, 0.5), (5, -0.5), (1, 1.5), (0, 2.0)])

    def test_negative(self):
        """Tests whether interpolator works correctly for negative range."""
        self._check_scalar(
            pero.LinInterpol(), -4, -2,
            [(-3, 0.5), (-5, -0.5), (-1, 1.5), (0, 2.0)])

    def test_negative_reversed(self):
        """Tests whether interpolator works correctly for negative reversed range."""
        self._check_scalar(
            pero.LinInterpol(), -2, -4,
            [(-3, 0.5), (-1, -0.5), (-5, 1.5), (0, -1.0)])

    def test_zero_cross(self):
        """Tests whether interpolator works correctly for cross-zero range."""
        self._check_scalar(
            pero.LinInterpol(), -2, 6,
            [(2, 0.5), (-4, -0.25), (8, 1.25), (0, 0.25)])

    def test_zero_left(self):
        """Tests whether interpolator works correctly for left-zero range."""
        self._check_scalar(
            pero.LinInterpol(), 0, 4,
            [(2, 0.5), (-2, -0.5), (6, 1.5), (0, 0)])

    def test_zero_right(self):
        """Tests whether interpolator works correctly for right-zero range."""
        self._check_scalar(
            pero.LinInterpol(), -4, 0,
            [(-2, 0.5), (-6, -0.5), (2, 1.5), (0, 1.0)])

    def test_arrays(self):
        """Tests whether interpolator works correctly with arrays."""
        interpol = pero.LinInterpol()

        # (start, end, data, expected normalized model) for every range shape
        # covered by the scalar tests above
        cases = [
            (2, 4, [0, 1, 3, 5], [-1., -0.5, 0.5, 1.5]),       # positive
            (4, 2, [0, 1, 3, 5], [2., 1.5, 0.5, -0.5]),        # positive reversed
            (-4, -2, [0, -1, -3, -5], [2., 1.5, 0.5, -0.5]),   # negative
            (-2, -4, [0, -1, -3, -5], [-1., -0.5, 0.5, 1.5]),  # negative reversed
            (-2, 6, [-4, 0, 2, 8], [-0.25, 0.25, 0.5, 1.25]),  # zero cross
            (0, 4, [-2, 0, 2, 6], [-0.5, 0, 0.5, 1.5]),        # zero left
            (-4, 0, [-6, -2, 0, 2], [-0.5, 0.5, 1.0, 1.5]),    # zero right
        ]

        for start, end, data, model in cases:
            self.assertEqual(
                list(interpol.normalize(numpy.array(data), start, end)), model)
            self.assertEqual(
                list(interpol.denormalize(numpy.array(model), start, end)), data)
# run test case
if __name__ == "__main__":
    # verbosity=2 prints each test method name as it runs
    unittest.main(verbosity=2)
| 35.004444 | 86 | 0.564627 | 988 | 7,876 | 4.479757 | 0.061741 | 0.237235 | 0.291008 | 0.20244 | 0.897198 | 0.874605 | 0.845459 | 0.77474 | 0.695888 | 0.65793 | 0 | 0.070448 | 0.28809 | 7,876 | 224 | 87 | 35.160714 | 0.718923 | 0.135221 | 0 | 0.150943 | 0 | 0 | 0.001189 | 0 | 0 | 0 | 0 | 0 | 0.660377 | 1 | 0.075472 | false | 0 | 0.028302 | 0 | 0.113208 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
eca46cd6de7cb26f0d91e23f587faa2e21b6a6ac | 5,389 | py | Python | imperative/python/test/unit/module/test_rnn.py | bealwang/MegEngine | df4153dc718b4544e720c58e439a0623c018cee2 | [
"Apache-2.0"
] | 1 | 2022-03-21T03:13:45.000Z | 2022-03-21T03:13:45.000Z | imperative/python/test/unit/module/test_rnn.py | bealwang/MegEngine | df4153dc718b4544e720c58e439a0623c018cee2 | [
"Apache-2.0"
] | null | null | null | imperative/python/test/unit/module/test_rnn.py | bealwang/MegEngine | df4153dc718b4544e720c58e439a0623c018cee2 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import numpy as np
import pytest
import megengine as mge
import megengine.functional as F
from megengine.device import get_device_count
from megengine.module import LSTM, RNN, LSTMCell, RNNCell
def assert_tuple_equal(src, ref):
    """Assert two shape-like sequences match in length and element-wise."""
    assert len(src) == len(ref)
    for left, right in zip(src, ref):
        assert left == right
@pytest.mark.skipif(get_device_count("gpu") > 0, reason="no algorithm on cuda")
@pytest.mark.parametrize(
    "batch_size, input_size, hidden_size, init_hidden",
    [(3, 10, 20, True), (3, 10, 20, False), (1, 10, 20, False)],
)
def test_rnn_cell(batch_size, input_size, hidden_size, init_hidden):
    """One RNNCell step must yield a hidden state of shape (batch, hidden)."""
    cell = RNNCell(input_size, hidden_size)
    inputs = mge.random.normal(size=(batch_size, input_size))
    # Either start from an explicit zero state or let the cell default (None).
    hidden = F.zeros(shape=(batch_size, hidden_size)) if init_hidden else None
    new_hidden = cell(inputs, hidden)
    assert_tuple_equal(new_hidden.shape, (batch_size, hidden_size))
@pytest.mark.skipif(get_device_count("gpu") > 0, reason="no algorithm on cuda")
@pytest.mark.parametrize(
    "batch_size, input_size, hidden_size, init_hidden",
    [(3, 10, 20, True), (3, 10, 20, False), (1, 10, 20, False)],
)
def test_lstm_cell(batch_size, input_size, hidden_size, init_hidden):
    """One LSTMCell step must yield h and c states of shape (batch, hidden)."""
    cell = LSTMCell(input_size, hidden_size)
    inputs = mge.random.normal(size=(batch_size, input_size))
    if init_hidden:
        # LSTM state is an (h, c) pair; reuse the same zero tensor for both.
        state = F.zeros(shape=(batch_size, hidden_size))
        hx = (state, state)
    else:
        hx = None
    new_h, new_c = cell(inputs, hx)
    for tensor in (new_h, new_c):
        assert_tuple_equal(tensor.shape, (batch_size, hidden_size))
@pytest.mark.skipif(get_device_count("gpu") > 0, reason="no algorithm on cuda")
@pytest.mark.parametrize(
    "batch_size, seq_len, input_size, hidden_size, num_layers, bidirectional, init_hidden, batch_first",
    [
        (3, 6, 10, 20, 2, False, False, True),
        pytest.param(
            3,
            3,
            10,
            10,
            1,
            True,
            True,
            False,
            marks=pytest.mark.skip(reason="bidirectional will cause cuda oom"),
        ),
    ],
)
def test_rnn(
    batch_size,
    seq_len,
    input_size,
    hidden_size,
    num_layers,
    bidirectional,
    init_hidden,
    batch_first,
):
    """A multi-layer RNN must produce correctly shaped outputs and final state."""
    num_directions = 2 if bidirectional else 1
    rnn = RNN(
        input_size,
        hidden_size,
        batch_first=batch_first,
        num_layers=num_layers,
        bidirectional=bidirectional,
    )

    # batch_first controls whether the leading axes are (batch, seq) or (seq, batch).
    x_shape = (
        (batch_size, seq_len, input_size)
        if batch_first
        else (seq_len, batch_size, input_size)
    )
    x = mge.random.normal(size=x_shape)

    if init_hidden:
        # Initial hidden state flattened over layers and directions.
        h = mge.random.normal(
            size=(batch_size, num_layers * num_directions * hidden_size)
        )
    else:
        h = None

    output, h_n = rnn(x, h)

    # Output keeps the input's leading (batch/seq) layout; the feature axis is
    # widened by the number of directions.
    assert_tuple_equal(output.shape, (*x_shape[:2], num_directions * hidden_size))
    assert_tuple_equal(
        h_n.shape, (num_directions * num_layers, batch_size, hidden_size)
    )
@pytest.mark.skipif(get_device_count("gpu") > 0, reason="no algorithm on cuda")
@pytest.mark.parametrize(
    "batch_size, seq_len, input_size, hidden_size, num_layers, bidirectional, init_hidden, batch_first",
    [
        (3, 10, 20, 20, 1, False, False, True),
        pytest.param(
            3,
            3,
            10,
            10,
            1,
            True,
            True,
            False,
            marks=pytest.mark.skip(reason="bidirectional will cause cuda oom"),
        ),
    ],
)
def test_lstm(
    batch_size,
    seq_len,
    input_size,
    hidden_size,
    num_layers,
    bidirectional,
    init_hidden,
    batch_first,
):
    """A multi-layer LSTM must produce correctly shaped outputs and (h, c) state."""
    num_directions = 2 if bidirectional else 1
    rnn = LSTM(
        input_size,
        hidden_size,
        batch_first=batch_first,
        num_layers=num_layers,
        bidirectional=bidirectional,
    )

    # batch_first controls whether the leading axes are (batch, seq) or (seq, batch).
    x_shape = (
        (batch_size, seq_len, input_size)
        if batch_first
        else (seq_len, batch_size, input_size)
    )
    x = mge.random.normal(size=x_shape)

    if init_hidden:
        # LSTM initial state is an (h, c) pair, flattened over layers/directions.
        h = mge.random.normal(
            size=(batch_size, num_layers * num_directions * hidden_size)
        )
        h = (h, h)
    else:
        h = None

    output, h_n = rnn(x, h)

    # Output keeps the input's leading (batch/seq) layout; the feature axis is
    # widened by the number of directions.
    assert_tuple_equal(output.shape, (*x_shape[:2], num_directions * hidden_size))

    # Final state is the (h_n, c_n) pair; both share the same layout.
    expected_state = (num_directions * num_layers, batch_size, hidden_size)
    assert_tuple_equal(h_n[0].shape, expected_state)
    assert_tuple_equal(h_n[1].shape, expected_state)
| 29.938889 | 104 | 0.633698 | 740 | 5,389 | 4.348649 | 0.164865 | 0.093226 | 0.087011 | 0.070851 | 0.811063 | 0.808266 | 0.808266 | 0.7977 | 0.7977 | 0.760721 | 0 | 0.021095 | 0.261087 | 5,389 | 179 | 105 | 30.106145 | 0.787042 | 0.066803 | 0 | 0.725 | 0 | 0 | 0.089243 | 0 | 0 | 0 | 0 | 0 | 0.08125 | 1 | 0.03125 | false | 0 | 0.0375 | 0 | 0.06875 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
01f3406d664d93a0f7d609d51ceefce9f01ae170 | 707 | py | Python | tests/c/test_writebatch_wi.py | zeroae/zeroae-rocksdb | 3f1c81a42f27091161727c94a0d5620c25fd984f | [
"Apache-2.0"
] | null | null | null | tests/c/test_writebatch_wi.py | zeroae/zeroae-rocksdb | 3f1c81a42f27091161727c94a0d5620c25fd984f | [
"Apache-2.0"
] | 53 | 2020-04-29T04:46:48.000Z | 2020-05-08T03:43:28.000Z | tests/c/test_writebatch_wi.py | zeroae/zeroae-rocksdb | 3f1c81a42f27091161727c94a0d5620c25fd984f | [
"Apache-2.0"
] | null | null | null | import pytest
from zeroae.rocksdb.c import writebatch_wi
def test_fixture(rocksdb_writebatch_wi):
assert rocksdb_writebatch_wi is not None
@pytest.mark.xfail
def test_get_from_batch(rocksdb_writebatch_wi):
    # Placeholder: writebatch_wi get_from_batch coverage not written yet;
    # marked xfail so the suite tracks the gap without failing.
    assert False
@pytest.mark.xfail
def test_get_from_batch_cf(rocksdb_writebatch_wi):
    # Placeholder: column-family variant of get_from_batch not covered yet.
    assert False
@pytest.mark.xfail
def test_get_from_batch_and_db(rocksdb_writebatch_wi):
    # Placeholder: get_from_batch_and_db coverage not written yet.
    assert False
@pytest.mark.xfail
def test_get_from_batch_and_db_cf(rocksdb_writebatch_wi):
    # Placeholder: column-family variant of get_from_batch_and_db not covered yet.
    assert False
@pytest.mark.xfail
def test_create_iterator_with_base(rocksdb_writebatch_wi):
    # Placeholder: create_iterator_with_base coverage not written yet.
    assert False
@pytest.mark.xfail
def test_create_iterator_with_base_cf(rocksdb_writebatch_wi):
    # Placeholder: column-family variant of create_iterator_with_base not
    # covered yet.
    assert False
| 18.605263 | 61 | 0.820368 | 109 | 707 | 4.908257 | 0.247706 | 0.201869 | 0.284112 | 0.327103 | 0.785047 | 0.785047 | 0.725234 | 0.725234 | 0.661682 | 0.661682 | 0 | 0 | 0.121641 | 707 | 37 | 62 | 19.108108 | 0.861514 | 0 | 0 | 0.545455 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.318182 | 1 | 0.318182 | false | 0 | 0.090909 | 0 | 0.409091 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
1755c3bf58fe66b54c52920e6c5ae856f4166bc9 | 173 | py | Python | 7kyu/(7 kyu) Jaden Casing Strings/(7 kyu) Jaden Casing Strings.py | e1r0nd/codewars | dc98484281345e7675eb5e8a51c192e2fa77c443 | [
"MIT"
] | 49 | 2018-04-30T06:42:45.000Z | 2021-07-22T16:39:02.000Z | (7 kyu) Jaden Casing Strings/(7 kyu) Jaden Casing Strings.py | novsunheng/codewars | c54b1d822356889b91587b088d02ca0bd3d8dc9e | [
"MIT"
] | 1 | 2020-08-31T02:36:53.000Z | 2020-08-31T10:14:00.000Z | (7 kyu) Jaden Casing Strings/(7 kyu) Jaden Casing Strings.py | novsunheng/codewars | c54b1d822356889b91587b088d02ca0bd3d8dc9e | [
"MIT"
] | 25 | 2018-04-02T20:57:58.000Z | 2021-05-28T15:24:51.000Z | def toJadenCase(string):
# #1
# return " ".join([c[0].upper() + c[1::] for c in string.split()])
# 2
return " ".join(c.capitalize() for c in string.split())
| 28.833333 | 70 | 0.554913 | 26 | 173 | 3.692308 | 0.538462 | 0.208333 | 0.229167 | 0.25 | 0.354167 | 0 | 0 | 0 | 0 | 0 | 0 | 0.029851 | 0.225434 | 173 | 5 | 71 | 34.6 | 0.686567 | 0.393064 | 0 | 0 | 0 | 0 | 0.01 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0.5 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 7 |
175e1892916a71ad3acbff85fb6b0703296fda7c | 576 | py | Python | Chiness_Unicode.py | xym5366/sim7600- | 4d91d232b6e4411675d7e5a6fdfdc2cb6f4fd2ca | [
"Apache-2.0"
] | 2 | 2021-02-17T01:52:55.000Z | 2021-02-17T01:52:59.000Z | Chiness_Unicode.py | xym5366/sim7600- | 4d91d232b6e4411675d7e5a6fdfdc2cb6f4fd2ca | [
"Apache-2.0"
] | null | null | null | Chiness_Unicode.py | xym5366/sim7600- | 4d91d232b6e4411675d7e5a6fdfdc2cb6f4fd2ca | [
"Apache-2.0"
] | null | null | null |
# Hex-encoded UTF-16 (UCS-2) payloads: every 4 hex digits encode one code
# unit, as produced e.g. by modem/SIM7600 AT-command responses.
text0 = b'30105FEB90128D855E02301160A876844E2D901A5FEB90125DF25230864E4E188DEF5B9E9A8C697C003253F7697C5FEB8D855E02FF0C8BF751ED0033003300300033003100305F5359295C3D5FEB63D053D6FF0C53CC53414E005FEB9012591AFF0C84254E1A65F695F40039003A00300030002D00320031003A00300030'
# NOTE: the original wrote b'\u3010...' — "\u" is not a valid escape inside a
# bytes literal and only worked because Python keeps invalid escapes literally
# (emitting a SyntaxWarning on modern interpreters). "\\u" is the explicit,
# warning-free spelling of the same two bytes. This sample already carries the
# "\u" prefix on its first group; it is kept for reference.
text = b'\\u301083DC9E1F9A7F7AD9301160A8670900324E2A530588F9572859296D258BD59A8C697C4E0953F7697C5E97FF0C53D64EF678014E3A0031002D0038002D003400300030003130010037002D0038002D0034003000300034'

# Prefix every 4-digit group with "\u" so the result reads "\u3010\u5FEB..."
# and can be decoded with the 'unicode-escape' codec. (The original achieved
# the leading separator via a range(-4, ...) empty-slice trick; prefixing each
# chunk directly produces byte-identical output and is much clearer.)
res = b''.join(b'\\u' + text0[i:i + 4] for i in range(0, len(text0), 4))
print(res)
print(res.decode('unicode-escape'))
| 57.6 | 262 | 0.890625 | 29 | 576 | 17.689655 | 0.62069 | 0.031189 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.609489 | 0.048611 | 576 | 9 | 263 | 64 | 0.326642 | 0 | 0 | 0 | 0 | 0 | 0.787986 | 0.759717 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.4 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
bd7b42a88bcd29e5787b8e9ad202d6467642b89f | 5,468 | py | Python | test/features/t_links/fixtures_t_link.py | NikkaZ/dbtvault_spark | 383723cd2a35a0bc7b82fd4e77fb1eda0f68cb07 | [
"Apache-2.0"
] | null | null | null | test/features/t_links/fixtures_t_link.py | NikkaZ/dbtvault_spark | 383723cd2a35a0bc7b82fd4e77fb1eda0f68cb07 | [
"Apache-2.0"
] | null | null | null | test/features/t_links/fixtures_t_link.py | NikkaZ/dbtvault_spark | 383723cd2a35a0bc7b82fd4e77fb1eda0f68cb07 | [
"Apache-2.0"
] | null | null | null | from behave import fixture
@fixture
def t_link(context):
    """
    Define the structures and metadata to load transactional links
    """
    # Staging-layer hash definitions: TRANSACTION_PK is the t-link's composite
    # primary-key hash; CUSTOMER_FK/ORDER_FK are the hub foreign-key hashes.
    context.hashed_columns = {
        "STG_CUSTOMER": {
            "TRANSACTION_PK": ["CUSTOMER_ID", "ORDER_ID", "TRANSACTION_NUMBER"],
            "CUSTOMER_FK": "CUSTOMER_ID",
            "ORDER_FK": "ORDER_ID"
        }
    }
    # Non-hashed columns derived in staging; EFFECTIVE_FROM mirrors the
    # transaction's business date.
    context.derived_columns = {
        "STG_CUSTOMER": {
            "EFFECTIVE_FROM": "TRANSACTION_DATE"
        }
    }
    # Column-role mapping fed to the t-link structure under test
    # (src_pk/src_fk/src_payload/src_eff/src_ldts/src_source).
    context.vault_structure_columns = {
        "T_LINK": {
            "src_pk": "TRANSACTION_PK",
            "src_fk": ["CUSTOMER_FK", "ORDER_FK"],
            "src_payload": ["TRANSACTION_NUMBER", "TRANSACTION_DATE",
                            "TYPE", "AMOUNT"],
            "src_eff": "EFFECTIVE_FROM",
            "src_ldts": "LOAD_DATE",
            "src_source": "SOURCE"
        }
    }
    # Seed (CSV load) column types for the raw stage and the target t-link
    # table — NOTE(review): these look like Snowflake types (VARCHAR/NUMBER/
    # BINARY), in contrast to the BigQuery/SQL Server variants below.
    context.seed_config = {
        "RAW_STAGE": {
            "+column_types": {
                "CUSTOMER_ID": "VARCHAR",
                "ORDER_ID": "VARCHAR",
                "TRANSACTION_NUMBER": "NUMBER(38,0)",
                "TRANSACTION_DATE": "DATE",
                "TYPE": "VARCHAR",
                "AMOUNT": "NUMBER(38,2)",
                "LOAD_DATE": "DATE",
                "SOURCE": "VARCHAR"
            }
        },
        "T_LINK": {
            "+column_types": {
                "TRANSACTION_PK": "BINARY(16)",
                "CUSTOMER_FK": "BINARY(16)",
                "ORDER_FK": "BINARY(16)",
                "TRANSACTION_NUMBER": "NUMBER(38,0)",
                "TRANSACTION_DATE": "DATE",
                "TYPE": "VARCHAR",
                "AMOUNT": "NUMBER(38,2)",
                "EFFECTIVE_FROM": "DATE",
                "LOAD_DATE": "DATE",
                "SOURCE": "VARCHAR"
            }
        }
    }
@fixture
def t_link_bigquery(context):
    """
    Define the structures and metadata to load transactional links
    """
    # Same staging hash definitions as the default t_link fixture.
    context.hashed_columns = {
        "STG_CUSTOMER": {
            "TRANSACTION_PK": ["CUSTOMER_ID", "ORDER_ID", "TRANSACTION_NUMBER"],
            "CUSTOMER_FK": "CUSTOMER_ID",
            "ORDER_FK": "ORDER_ID"
        }
    }
    # EFFECTIVE_FROM mirrors the transaction's business date.
    context.derived_columns = {
        "STG_CUSTOMER": {
            "EFFECTIVE_FROM": "TRANSACTION_DATE"
        }
    }
    # Column-role mapping fed to the t-link structure under test.
    context.vault_structure_columns = {
        "T_LINK": {
            "src_pk": "TRANSACTION_PK",
            "src_fk": ["CUSTOMER_FK", "ORDER_FK"],
            "src_payload": ["TRANSACTION_NUMBER", "TRANSACTION_DATE",
                            "TYPE", "AMOUNT"],
            "src_eff": "EFFECTIVE_FROM",
            "src_ldts": "LOAD_DATE",
            "src_source": "SOURCE"
        }
    }
    # BigQuery variant: BigQuery has no VARCHAR/BINARY, so string-like and
    # hash columns (and even numeric TRANSACTION_NUMBER/AMOUNT here) are
    # seeded as STRING.
    context.seed_config = {
        "RAW_STAGE": {
            "+column_types": {
                "CUSTOMER_ID": "STRING",
                "ORDER_ID": "STRING",
                "TRANSACTION_NUMBER": "STRING",
                "TRANSACTION_DATE": "DATE",
                "TYPE": "STRING",
                "AMOUNT": "STRING",
                "LOAD_DATE": "DATE",
                "SOURCE": "STRING"
            }
        },
        "T_LINK": {
            "+column_types": {
                "TRANSACTION_PK": "STRING",
                "CUSTOMER_FK": "STRING",
                "ORDER_FK": "STRING",
                "TRANSACTION_NUMBER": "STRING",
                "TRANSACTION_DATE": "DATE",
                "TYPE": "STRING",
                "AMOUNT": "STRING",
                "EFFECTIVE_FROM": "DATE",
                "LOAD_DATE": "DATE",
                "SOURCE": "STRING"
            }
        }
    }
@fixture
def t_link_sqlserver(context):
    """
    Define the structures and metadata to load transactional links
    """
    # Same staging hash definitions as the default t_link fixture.
    context.hashed_columns = {
        "STG_CUSTOMER": {
            "TRANSACTION_PK": ["CUSTOMER_ID", "ORDER_ID", "TRANSACTION_NUMBER"],
            "CUSTOMER_FK": "CUSTOMER_ID",
            "ORDER_FK": "ORDER_ID"
        }
    }
    # EFFECTIVE_FROM mirrors the transaction's business date.
    context.derived_columns = {
        "STG_CUSTOMER": {
            "EFFECTIVE_FROM": "TRANSACTION_DATE"
        }
    }
    # Column-role mapping fed to the t-link structure under test.
    context.vault_structure_columns = {
        "T_LINK": {
            "src_pk": "TRANSACTION_PK",
            "src_fk": ["CUSTOMER_FK", "ORDER_FK"],
            "src_payload": ["TRANSACTION_NUMBER", "TRANSACTION_DATE",
                            "TYPE", "AMOUNT"],
            "src_eff": "EFFECTIVE_FROM",
            "src_ldts": "LOAD_DATE",
            "src_source": "SOURCE"
        }
    }
    # SQL Server variant: sized VARCHAR(50) for strings, DECIMAL for numerics,
    # BINARY(16) for the MD5-sized hash keys.
    context.seed_config = {
        "RAW_STAGE": {
            "+column_types": {
                "CUSTOMER_ID": "VARCHAR(50)",
                "ORDER_ID": "VARCHAR(50)",
                "TRANSACTION_NUMBER": "DECIMAL(38,0)",
                "TRANSACTION_DATE": "DATE",
                "TYPE": "VARCHAR(50)",
                "AMOUNT": "DECIMAL(38,2)",
                "LOAD_DATE": "DATE",
                "SOURCE": "VARCHAR(50)"
            }
        },
        "T_LINK": {
            "+column_types": {
                "TRANSACTION_PK": "BINARY(16)",
                "CUSTOMER_FK": "BINARY(16)",
                "ORDER_FK": "BINARY(16)",
                "TRANSACTION_NUMBER": "DECIMAL(38,0)",
                "TRANSACTION_DATE": "DATE",
                "TYPE": "VARCHAR(50)",
                "AMOUNT": "DECIMAL(38,2)",
                "EFFECTIVE_FROM": "DATE",
                "LOAD_DATE": "DATE",
                "SOURCE": "VARCHAR(50)"
            }
        }
    }
| 29.085106 | 80 | 0.453914 | 448 | 5,468 | 5.214286 | 0.125 | 0.087329 | 0.046233 | 0.059075 | 0.931935 | 0.919092 | 0.904966 | 0.868579 | 0.868579 | 0.868579 | 0 | 0.014621 | 0.399598 | 5,468 | 187 | 81 | 29.240642 | 0.696924 | 0.034382 | 0 | 0.7125 | 0 | 0 | 0.380206 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.01875 | false | 0 | 0.00625 | 0 | 0.025 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.