hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
79d9e73ddf25d305b08db2eebdfb0061bfcb275a
| 17,230
|
py
|
Python
|
bookwyrm/migrations/0020_auto_20201208_0213.py
|
mouse-reeve/fedireads
|
e3471fcc3500747a1b1deaaca662021aae5b08d4
|
[
"CC0-1.0"
] | 270
|
2020-01-27T06:06:07.000Z
|
2020-06-21T00:28:18.000Z
|
bookwyrm/migrations/0020_auto_20201208_0213.py
|
mouse-reeve/fedireads
|
e3471fcc3500747a1b1deaaca662021aae5b08d4
|
[
"CC0-1.0"
] | 158
|
2020-02-10T20:36:54.000Z
|
2020-06-26T17:12:54.000Z
|
bookwyrm/migrations/0020_auto_20201208_0213.py
|
mouse-reeve/fedireads
|
e3471fcc3500747a1b1deaaca662021aae5b08d4
|
[
"CC0-1.0"
] | 15
|
2020-02-13T21:53:33.000Z
|
2020-06-17T16:52:46.000Z
|
# Generated by Django 3.0.7 on 2020-12-08 02:13
import bookwyrm.models.fields
from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
("bookwyrm", "0019_auto_20201130_1939"),
]
operations = [
migrations.AlterField(
model_name="author",
name="aliases",
field=bookwyrm.models.fields.ArrayField(
base_field=models.CharField(max_length=255),
blank=True,
default=list,
size=None,
),
),
migrations.AlterField(
model_name="author",
name="bio",
field=bookwyrm.models.fields.TextField(blank=True, null=True),
),
migrations.AlterField(
model_name="author",
name="born",
field=bookwyrm.models.fields.DateTimeField(blank=True, null=True),
),
migrations.AlterField(
model_name="author",
name="died",
field=bookwyrm.models.fields.DateTimeField(blank=True, null=True),
),
migrations.AlterField(
model_name="author",
name="name",
field=bookwyrm.models.fields.CharField(max_length=255),
),
migrations.AlterField(
model_name="author",
name="openlibrary_key",
field=bookwyrm.models.fields.CharField(
blank=True, max_length=255, null=True
),
),
migrations.AlterField(
model_name="author",
name="wikipedia_link",
field=bookwyrm.models.fields.CharField(
blank=True, max_length=255, null=True
),
),
migrations.AlterField(
model_name="book",
name="authors",
field=bookwyrm.models.fields.ManyToManyField(to="bookwyrm.Author"),
),
migrations.AlterField(
model_name="book",
name="cover",
field=bookwyrm.models.fields.ImageField(
blank=True, null=True, upload_to="covers/"
),
),
migrations.AlterField(
model_name="book",
name="description",
field=bookwyrm.models.fields.TextField(blank=True, null=True),
),
migrations.AlterField(
model_name="book",
name="first_published_date",
field=bookwyrm.models.fields.DateTimeField(blank=True, null=True),
),
migrations.AlterField(
model_name="book",
name="goodreads_key",
field=bookwyrm.models.fields.CharField(
blank=True, max_length=255, null=True
),
),
migrations.AlterField(
model_name="book",
name="languages",
field=bookwyrm.models.fields.ArrayField(
base_field=models.CharField(max_length=255),
blank=True,
default=list,
size=None,
),
),
migrations.AlterField(
model_name="book",
name="librarything_key",
field=bookwyrm.models.fields.CharField(
blank=True, max_length=255, null=True
),
),
migrations.AlterField(
model_name="book",
name="openlibrary_key",
field=bookwyrm.models.fields.CharField(
blank=True, max_length=255, null=True
),
),
migrations.AlterField(
model_name="book",
name="published_date",
field=bookwyrm.models.fields.DateTimeField(blank=True, null=True),
),
migrations.AlterField(
model_name="book",
name="series",
field=bookwyrm.models.fields.CharField(
blank=True, max_length=255, null=True
),
),
migrations.AlterField(
model_name="book",
name="series_number",
field=bookwyrm.models.fields.CharField(
blank=True, max_length=255, null=True
),
),
migrations.AlterField(
model_name="book",
name="sort_title",
field=bookwyrm.models.fields.CharField(
blank=True, max_length=255, null=True
),
),
migrations.AlterField(
model_name="book",
name="subject_places",
field=bookwyrm.models.fields.ArrayField(
base_field=models.CharField(max_length=255),
blank=True,
default=list,
null=True,
size=None,
),
),
migrations.AlterField(
model_name="book",
name="subjects",
field=bookwyrm.models.fields.ArrayField(
base_field=models.CharField(max_length=255),
blank=True,
default=list,
null=True,
size=None,
),
),
migrations.AlterField(
model_name="book",
name="subtitle",
field=bookwyrm.models.fields.CharField(
blank=True, max_length=255, null=True
),
),
migrations.AlterField(
model_name="book",
name="title",
field=bookwyrm.models.fields.CharField(max_length=255),
),
migrations.AlterField(
model_name="boost",
name="boosted_status",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
related_name="boosters",
to="bookwyrm.Status",
),
),
migrations.AlterField(
model_name="comment",
name="book",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT, to="bookwyrm.Edition"
),
),
migrations.AlterField(
model_name="edition",
name="asin",
field=bookwyrm.models.fields.CharField(
blank=True, max_length=255, null=True
),
),
migrations.AlterField(
model_name="edition",
name="isbn_10",
field=bookwyrm.models.fields.CharField(
blank=True, max_length=255, null=True
),
),
migrations.AlterField(
model_name="edition",
name="isbn_13",
field=bookwyrm.models.fields.CharField(
blank=True, max_length=255, null=True
),
),
migrations.AlterField(
model_name="edition",
name="oclc_number",
field=bookwyrm.models.fields.CharField(
blank=True, max_length=255, null=True
),
),
migrations.AlterField(
model_name="edition",
name="pages",
field=bookwyrm.models.fields.IntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name="edition",
name="parent_work",
field=bookwyrm.models.fields.ForeignKey(
null=True,
on_delete=django.db.models.deletion.PROTECT,
related_name="editions",
to="bookwyrm.Work",
),
),
migrations.AlterField(
model_name="edition",
name="physical_format",
field=bookwyrm.models.fields.CharField(
blank=True, max_length=255, null=True
),
),
migrations.AlterField(
model_name="edition",
name="publishers",
field=bookwyrm.models.fields.ArrayField(
base_field=models.CharField(max_length=255),
blank=True,
default=list,
size=None,
),
),
migrations.AlterField(
model_name="favorite",
name="status",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT, to="bookwyrm.Status"
),
),
migrations.AlterField(
model_name="favorite",
name="user",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL
),
),
migrations.AlterField(
model_name="image",
name="caption",
field=bookwyrm.models.fields.TextField(blank=True, null=True),
),
migrations.AlterField(
model_name="image",
name="image",
field=bookwyrm.models.fields.ImageField(
blank=True, null=True, upload_to="status/"
),
),
migrations.AlterField(
model_name="quotation",
name="book",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT, to="bookwyrm.Edition"
),
),
migrations.AlterField(
model_name="quotation",
name="quote",
field=bookwyrm.models.fields.TextField(),
),
migrations.AlterField(
model_name="review",
name="book",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT, to="bookwyrm.Edition"
),
),
migrations.AlterField(
model_name="review",
name="name",
field=bookwyrm.models.fields.CharField(max_length=255, null=True),
),
migrations.AlterField(
model_name="review",
name="rating",
field=bookwyrm.models.fields.IntegerField(
blank=True,
default=None,
null=True,
validators=[
django.core.validators.MinValueValidator(1),
django.core.validators.MaxValueValidator(5),
],
),
),
migrations.AlterField(
model_name="shelf",
name="name",
field=bookwyrm.models.fields.CharField(max_length=100),
),
migrations.AlterField(
model_name="shelf",
name="privacy",
field=bookwyrm.models.fields.CharField(
choices=[
("public", "Public"),
("unlisted", "Unlisted"),
("followers", "Followers"),
("direct", "Direct"),
],
default="public",
max_length=255,
),
),
migrations.AlterField(
model_name="shelf",
name="user",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL
),
),
migrations.AlterField(
model_name="shelfbook",
name="added_by",
field=bookwyrm.models.fields.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.PROTECT,
to=settings.AUTH_USER_MODEL,
),
),
migrations.AlterField(
model_name="shelfbook",
name="book",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT, to="bookwyrm.Edition"
),
),
migrations.AlterField(
model_name="shelfbook",
name="shelf",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT, to="bookwyrm.Shelf"
),
),
migrations.AlterField(
model_name="status",
name="content",
field=bookwyrm.models.fields.TextField(blank=True, null=True),
),
migrations.AlterField(
model_name="status",
name="mention_books",
field=bookwyrm.models.fields.TagField(
related_name="mention_book", to="bookwyrm.Edition"
),
),
migrations.AlterField(
model_name="status",
name="mention_users",
field=bookwyrm.models.fields.TagField(
related_name="mention_user", to=settings.AUTH_USER_MODEL
),
),
migrations.AlterField(
model_name="status",
name="published_date",
field=bookwyrm.models.fields.DateTimeField(
default=django.utils.timezone.now
),
),
migrations.AlterField(
model_name="status",
name="reply_parent",
field=bookwyrm.models.fields.ForeignKey(
null=True,
on_delete=django.db.models.deletion.PROTECT,
to="bookwyrm.Status",
),
),
migrations.AlterField(
model_name="status",
name="sensitive",
field=bookwyrm.models.fields.BooleanField(default=False),
),
migrations.AlterField(
model_name="status",
name="user",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL
),
),
migrations.AlterField(
model_name="tag",
name="name",
field=bookwyrm.models.fields.CharField(max_length=100, unique=True),
),
migrations.AlterField(
model_name="userblocks",
name="user_object",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
related_name="userblocks_user_object",
to=settings.AUTH_USER_MODEL,
),
),
migrations.AlterField(
model_name="userblocks",
name="user_subject",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
related_name="userblocks_user_subject",
to=settings.AUTH_USER_MODEL,
),
),
migrations.AlterField(
model_name="userfollowrequest",
name="user_object",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
related_name="userfollowrequest_user_object",
to=settings.AUTH_USER_MODEL,
),
),
migrations.AlterField(
model_name="userfollowrequest",
name="user_subject",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
related_name="userfollowrequest_user_subject",
to=settings.AUTH_USER_MODEL,
),
),
migrations.AlterField(
model_name="userfollows",
name="user_object",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
related_name="userfollows_user_object",
to=settings.AUTH_USER_MODEL,
),
),
migrations.AlterField(
model_name="userfollows",
name="user_subject",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
related_name="userfollows_user_subject",
to=settings.AUTH_USER_MODEL,
),
),
migrations.AlterField(
model_name="usertag",
name="book",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT, to="bookwyrm.Edition"
),
),
migrations.AlterField(
model_name="usertag",
name="tag",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT, to="bookwyrm.Tag"
),
),
migrations.AlterField(
model_name="usertag",
name="user",
field=bookwyrm.models.fields.ForeignKey(
on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL
),
),
migrations.AlterField(
model_name="work",
name="default_edition",
field=bookwyrm.models.fields.ForeignKey(
null=True,
on_delete=django.db.models.deletion.PROTECT,
to="bookwyrm.Edition",
),
),
migrations.AlterField(
model_name="work",
name="lccn",
field=bookwyrm.models.fields.CharField(
blank=True, max_length=255, null=True
),
),
]
| 33.917323
| 88
| 0.52188
| 1,482
| 17,230
| 5.930499
| 0.099865
| 0.108317
| 0.154739
| 0.221072
| 0.889976
| 0.877119
| 0.80669
| 0.772784
| 0.746388
| 0.733644
| 0
| 0.010609
| 0.370865
| 17,230
| 507
| 89
| 33.984221
| 0.800185
| 0.002612
| 0
| 0.806387
| 1
| 0
| 0.087528
| 0.010126
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.011976
| 0
| 0.017964
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
8de9009ea37818bbde85735940295b4b4847bd74
| 44
|
py
|
Python
|
sentenai/view/__init__.py
|
sentenai/py-sentenai
|
fec672ae1ac195523067d8f882cfe3419ab4c042
|
[
"BSD-3-Clause"
] | 1
|
2018-01-09T18:49:06.000Z
|
2018-01-09T18:49:06.000Z
|
sentenai/view/__init__.py
|
sentenai/py-sentenai
|
fec672ae1ac195523067d8f882cfe3419ab4c042
|
[
"BSD-3-Clause"
] | 168
|
2017-03-15T20:24:52.000Z
|
2022-03-15T14:41:26.000Z
|
sentenai/view/__init__.py
|
sentenai/py-sentenai
|
fec672ae1ac195523067d8f882cfe3419ab4c042
|
[
"BSD-3-Clause"
] | 4
|
2017-07-22T04:03:08.000Z
|
2017-12-22T00:21:21.000Z
|
from sentenai.view.views import View, Views
| 22
| 43
| 0.818182
| 7
| 44
| 5.142857
| 0.714286
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113636
| 44
| 1
| 44
| 44
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8df277ae5c151e40e06491452b0099df9d2eba6d
| 81
|
py
|
Python
|
foxcross/constants.py
|
laactech/foxcross
|
a55fb791461c97177a977c64d781b98859124bac
|
[
"BSD-3-Clause"
] | 19
|
2019-05-31T14:34:10.000Z
|
2021-02-12T18:10:50.000Z
|
foxcross/constants.py
|
laactechnology/foxcross
|
a55fb791461c97177a977c64d781b98859124bac
|
[
"BSD-3-Clause"
] | null | null | null |
foxcross/constants.py
|
laactechnology/foxcross
|
a55fb791461c97177a977c64d781b98859124bac
|
[
"BSD-3-Clause"
] | null | null | null |
SLUGIFY_REGEX = r"([a-z](?=[A-Z])|[A-Z](?=[A-Z][a-z]))"
SLUGIFY_REPLACE = r"\1-"
| 27
| 55
| 0.506173
| 17
| 81
| 2.294118
| 0.411765
| 0.25641
| 0.307692
| 0.410256
| 0.25641
| 0.25641
| 0.25641
| 0.25641
| 0
| 0
| 0
| 0.013333
| 0.074074
| 81
| 2
| 56
| 40.5
| 0.506667
| 0
| 0
| 0
| 0
| 0.5
| 0.481481
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5c266728e2b966366b2568442d2e29dbd45fc1c7
| 12,108
|
py
|
Python
|
tests/api/test-msg.py
|
arienchen/pytibrv
|
9c198805bc9ac217e9a7f730d3c2dba32bf77336
|
[
"BSD-3-Clause"
] | 12
|
2017-03-17T15:02:02.000Z
|
2021-11-05T08:48:20.000Z
|
tests/api/test-msg.py
|
arienchen/pytibrv
|
9c198805bc9ac217e9a7f730d3c2dba32bf77336
|
[
"BSD-3-Clause"
] | null | null | null |
tests/api/test-msg.py
|
arienchen/pytibrv
|
9c198805bc9ac217e9a7f730d3c2dba32bf77336
|
[
"BSD-3-Clause"
] | 6
|
2019-10-04T23:12:25.000Z
|
2021-08-02T21:39:41.000Z
|
import ctypes
from pytibrv.api import *
from pytibrv.status import *
from pytibrv.msg import *
import unittest
class MsgTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
status = tibrv_Open()
assert TIBRV_OK == status, tibrvStatus_GetText(status)
@classmethod
def tearDownClass(cls):
tibrv_Close()
def test_new(self):
status, msg = tibrvMsg_Create()
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, sz = tibrvMsg_ConvertToString(msg)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
self.assertEqual("{}", sz)
status = tibrvMsg_Destroy(msg)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
def test_copy(self):
status, msg = tibrvMsg_Create()
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status = tibrvMsg_UpdateString(msg, 'A', 'TEST')
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, msg2 = tibrvMsg_CreateCopy(msg)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, sz = tibrvMsg_ConvertToString(msg)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, sz2 = tibrvMsg_ConvertToString(msg2)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
self.assertEqual(sz, sz2)
status = tibrvMsg_Destroy(msg)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status = tibrvMsg_Destroy(msg2)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
def test_invalid(self):
status, msg = tibrvMsg_Create()
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status = tibrvMsg_Destroy(msg)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
# construct by invalid msg id, which just destroyed
status = tibrvMsg_SetSendSubject(msg, 'TEST')
self.assertEqual(TIBRV_INVALID_MSG, status, tibrvStatus_GetText(status))
status = tibrvMsg_Destroy(msg)
self.assertEqual(TIBRV_INVALID_MSG, status, tibrvStatus_GetText(status))
# assign random msg id, ex: 12345
# DONT TRY IT, SEGMENT FAULT
#
#status = tibrvMsg_Destroy(12345)
def test_subject(self):
status, msg = tibrvMsg_Create()
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status = tibrvMsg_SetSendSubject(msg, 'TEST')
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, subj = tibrvMsg_GetSendSubject(msg)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
self.assertEqual('TEST', subj)
status = tibrvMsg_SetReplySubject(msg, 'TEST2')
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, subj = tibrvMsg_GetReplySubject(msg)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
self.assertEqual('TEST2', subj)
status = tibrvMsg_Destroy(msg)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
def test_get(self):
status, msg = tibrvMsg_Create()
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status = tibrvMsg_UpdateI8(msg, 'I8', 0xFFFF)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, n = tibrvMsg_GetI8(msg, 'I8')
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
self.assertEqual(-1, n)
status = tibrvMsg_UpdateU8(msg, 'U8', 0xFFFF)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, n = tibrvMsg_GetU8(msg, 'U8')
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
self.assertEqual(0x00FF, n)
status = tibrvMsg_UpdateI16(msg, 'I16', 0xFFFFFFFE)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, n = tibrvMsg_GetI16(msg, 'I16')
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
self.assertEqual(-2, n)
status = tibrvMsg_UpdateU16(msg, 'U16', 0xFFFFFFFE)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, n = tibrvMsg_GetU16(msg, 'U16')
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
self.assertEqual(0x00FFFE, n)
status = tibrvMsg_UpdateI32(msg, 'I32', 0x0000FFFFFFFFFFFD)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, n = tibrvMsg_GetI32(msg, 'I32')
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
self.assertEqual(-3, n)
status = tibrvMsg_UpdateU32(msg, 'U32', 0x0000FFFFFFFFFFFD)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, n = tibrvMsg_GetU32(msg, 'U32')
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
self.assertEqual(0x00FFFFFFFD, n)
status = tibrvMsg_UpdateI64(msg, 'I64', 0xfffffffffffffffc)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, n = tibrvMsg_GetI64(msg, 'I64')
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
self.assertEqual(-4, n)
status = tibrvMsg_UpdateU64(msg, 'U64', 0xFFFFFFFFFFFFFFFC)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, n = tibrvMsg_GetU64(msg, 'U64')
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
self.assertEqual(0x00FFFFFFFFFFFFFFFC, n)
status = tibrvMsg_UpdateString(msg, 'STR', 'TEST')
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, sz = tibrvMsg_GetString(msg, 'STR')
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
self.assertEqual('TEST', sz)
status, msg2 = tibrvMsg_Create()
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status = tibrvMsg_UpdateString(msg2, 'DATA', 'TEST')
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status = tibrvMsg_UpdateMsg(msg, 'MSG', msg2)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, mm = tibrvMsg_GetMsg(msg, 'MSG')
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, sz = tibrvMsg_ConvertToString(msg2)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, sz2 = tibrvMsg_ConvertToString(mm)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
self.assertEqual(sz, sz2)
status = tibrvMsg_Destroy(msg)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status = tibrvMsg_Destroy(msg2)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
def test_datetime(self):
status, msg = tibrvMsg_Create()
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status = tibrvMsg_AddDateTime(msg, 'DT', None)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status = tibrvMsg_UpdateDateTime(msg, 'DT', None)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, ret = tibrvMsg_GetDateTime(msg, 'DT')
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
assert ret is not None
status, dt = tibrvMsg_GetCurrentTime()
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
self.assertTrue(type(dt) is tibrvMsgDateTime)
print(dt)
status = tibrvMsg_UpdateDateTime(msg, 'DT', dt)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, dt2 = tibrvMsg_GetDateTime(msg, 'DT')
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
self.assertTrue(type(dt2) is tibrvMsgDateTime)
self.assertEqual(dt, dt2)
dt3 = tibrvMsgDateTime()
status = tibrvMsg_UpdateDateTime(msg, 'DT3', dt3)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, dt4 = tibrvMsg_GetDateTime(msg, 'DT3')
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
self.assertEqual(dt3, dt4)
status = tibrvMsg_Destroy(msg)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
def test_array(self):
status, msg = tibrvMsg_Create()
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
# I8
data = [1,2,3,4,5]
status = tibrvMsg_UpdateI8Array(msg, 'I8', data)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, ret = tibrvMsg_GetI8Array(msg, 'I8')
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
assert data == ret
# U8
data = [1,2,3,4,5]
status = tibrvMsg_UpdateU8Array(msg, 'U8', data)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, ret = tibrvMsg_GetU8Array(msg, 'U8')
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
assert data == ret
# I16
data = [1,2,3,4,5]
status = tibrvMsg_UpdateI16Array(msg, 'I16', data)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, ret = tibrvMsg_GetI16Array(msg, 'I16')
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
assert data == ret
# U16
data = [1,2,3,4,5]
status = tibrvMsg_UpdateU16Array(msg, 'U16', data)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, ret = tibrvMsg_GetU16Array(msg, 'U16')
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
assert data == ret
# I32
data = [1,2,3,4,5]
status = tibrvMsg_UpdateI32Array(msg, 'I32', data)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, ret = tibrvMsg_GetI32Array(msg, 'I32')
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
assert data == ret
# U32
data = [1,2,3,4,5]
status = tibrvMsg_UpdateU32Array(msg, 'U32', data)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, ret = tibrvMsg_GetU32Array(msg, 'U32')
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
assert data == ret
# I64
data = [1,2,3,4,5]
status = tibrvMsg_UpdateI64Array(msg, 'I64', data)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, ret = tibrvMsg_GetI32Array(msg, 'I64')
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
assert data == ret
# U64
data = [1,2,3,4,5]
status = tibrvMsg_UpdateU64Array(msg, 'U64', data)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, ret = tibrvMsg_GetU64Array(msg, 'U64')
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
assert data == ret
# F32
data = [1.1,2.2,3.3,4.4,5.5]
status = tibrvMsg_UpdateF32Array(msg, 'F32', data)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, ret = tibrvMsg_GetF32Array(msg, 'F32')
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
for x in range(len(data)):
f = ctypes.c_float(data[x]).value # convert to F32
assert f == ret[x]
# F64
data = [1.1,2.2,3.3,4.4,5.5]
status = tibrvMsg_UpdateF64Array(msg, 'F64', data)
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
status, ret = tibrvMsg_GetF64Array(msg, 'F64')
self.assertEqual(TIBRV_OK, status, tibrvStatus_GetText(status))
assert data == ret
if __name__ == "__main__":
unittest.main(verbosity=2)
| 37.141104
| 80
| 0.670383
| 1,356
| 12,108
| 5.79646
| 0.117994
| 0.179389
| 0.241221
| 0.301527
| 0.806616
| 0.800636
| 0.795929
| 0.795929
| 0.759288
| 0.733842
| 0
| 0.029215
| 0.22258
| 12,108
| 325
| 81
| 37.255385
| 0.8058
| 0.01594
| 0
| 0.547511
| 0
| 0
| 0.015214
| 0
| 0
| 0
| 0.012608
| 0
| 0.488688
| 1
| 0.040724
| false
| 0
| 0.022624
| 0
| 0.067873
| 0.004525
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
6905b55b6a2b883e0f2959cce6f56f198963f7f6
| 75
|
py
|
Python
|
models/__init__.py
|
mengjian0502/StructuredCG_RRAM
|
75a7d3541e8d392bbbcafdf20af72ab75fe4de78
|
[
"MIT"
] | null | null | null |
models/__init__.py
|
mengjian0502/StructuredCG_RRAM
|
75a7d3541e8d392bbbcafdf20af72ab75fe4de78
|
[
"MIT"
] | null | null | null |
models/__init__.py
|
mengjian0502/StructuredCG_RRAM
|
75a7d3541e8d392bbbcafdf20af72ab75fe4de78
|
[
"MIT"
] | null | null | null |
from .resnet_cifar import *
from .resnet18_cifar import *
from .cg import *
| 25
| 29
| 0.773333
| 11
| 75
| 5.090909
| 0.545455
| 0.392857
| 0.535714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.03125
| 0.146667
| 75
| 3
| 30
| 25
| 0.84375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
15f93403c6f0763dfabdb790769c6f2d173e4062
| 217
|
py
|
Python
|
travis-ci/tests/test_travis.py
|
taniamprince/ci-workshops
|
d9a137704fb3b905acccae6d12a76bdfc152dc1d
|
[
"MIT"
] | null | null | null |
travis-ci/tests/test_travis.py
|
taniamprince/ci-workshops
|
d9a137704fb3b905acccae6d12a76bdfc152dc1d
|
[
"MIT"
] | null | null | null |
travis-ci/tests/test_travis.py
|
taniamprince/ci-workshops
|
d9a137704fb3b905acccae6d12a76bdfc152dc1d
|
[
"MIT"
] | null | null | null |
import TravisCIWorkshop
def test_hello_world_works():
    """say_hello() must return the canonical greeting string."""
    greeting = TravisCIWorkshop.say_hello()
    assert greeting == "Hello world!"
def test_goodbye_world_works():
    """say_goodbye() must return the canonical farewell string."""
    farewell = TravisCIWorkshop.say_goodbye()
    assert farewell == "Goodbye, cruel world!"
| 21.7
| 68
| 0.760369
| 25
| 217
| 6.28
| 0.44
| 0.089172
| 0.203822
| 0.407643
| 0.44586
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133641
| 217
| 9
| 69
| 24.111111
| 0.835106
| 0
| 0
| 0
| 0
| 0
| 0.152074
| 0
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0.4
| true
| 0
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
c616bb9ae89cf7a1f4c09924e4f77cf37dd9dc99
| 50,372
|
py
|
Python
|
pkgs/ops-pkg/src/genie/libs/ops/mcast/iosxr/tests/mcast_output.py
|
kecorbin/genielibs
|
5d3951b8911013691822e73e9c3d0f557ca10f43
|
[
"Apache-2.0"
] | null | null | null |
pkgs/ops-pkg/src/genie/libs/ops/mcast/iosxr/tests/mcast_output.py
|
kecorbin/genielibs
|
5d3951b8911013691822e73e9c3d0f557ca10f43
|
[
"Apache-2.0"
] | null | null | null |
pkgs/ops-pkg/src/genie/libs/ops/mcast/iosxr/tests/mcast_output.py
|
kecorbin/genielibs
|
5d3951b8911013691822e73e9c3d0f557ca10f43
|
[
"Apache-2.0"
] | null | null | null |
'''
Mcast Genie Ops Object Outputs for IOSXR.
'''
class McastOutput(object):
    """Canned outputs for the Mcast Genie Ops unit tests (IOSXR).

    The attributes fall into three groups:
      * ``ShowVrfAllDetail`` -- a parsed 'show vrf all detail' structure,
      * ``Pim*`` / ``Mrib*``  -- raw CLI captures (triple-quoted strings)
        replayed to the parsers under test,
      * ``McastInfo`` / ``McastTable`` -- the expected Ops data structures
        corresponding to the captures above.
    """

    # Parsed 'show vrf all detail' fixture: VRFs 'default' and 'VRF1' with
    # identical route-target sets under the ipv4/ipv6 unicast address families.
    ShowVrfAllDetail = {
        "default": {
        "description": "not set",
        "vrf_mode": "regular",
        "address_family": {
        "ipv6 unicast": {
        "route_target": {
        "400:1": {
        "rt_type": "import",
        "route_target": "400:1"},
        "300:1": {
        "rt_type": "import",
        "route_target": "300:1"},
        "200:1": {
        "rt_type": "both",
        "route_target": "200:1"},
        "200:2": {
        "rt_type": "import",
        "route_target": "200:2"}}},
        "ipv4 unicast": {
        "route_target": {
        "400:1": {
        "rt_type": "import",
        "route_target": "400:1"},
        "300:1": {
        "rt_type": "import",
        "route_target": "300:1"},
        "200:1": {
        "rt_type": "both",
        "route_target": "200:1"},
        "200:2": {
        "rt_type": "import",
        "route_target": "200:2"}}}},
        "route_distinguisher": "200:1",
        "interfaces": ["GigabitEthernet0/0/0/1"]},
        "VRF1": {
        "description": "not set",
        "vrf_mode": "regular",
        "address_family": {
        "ipv6 unicast": {
        "route_target": {
        "400:1": {
        "rt_type": "import",
        "route_target": "400:1"},
        "300:1": {
        "rt_type": "import",
        "route_target": "300:1"},
        "200:1": {
        "rt_type": "both",
        "route_target": "200:1"},
        "200:2": {
        "rt_type": "import",
        "route_target": "200:2"}}},
        "ipv4 unicast": {
        "route_target": {
        "400:1": {
        "rt_type": "import",
        "route_target": "400:1"},
        "300:1": {
        "rt_type": "import",
        "route_target": "300:1"},
        "200:1": {
        "rt_type": "both",
        "route_target": "200:1"},
        "200:2": {
        "rt_type": "import",
        "route_target": "200:2"}}}},
        "route_distinguisher": "200:1",
        "interfaces": ["GigabitEthernet0/0/0/1"]}}

    ############################################
    # INFO - VRF: default
    ############################################

    # Raw CLI captures feeding the 'info' side of the Ops object.
    PimVrfDefaultIpv4Mstatic = '''\
RP/0/0/CPU0:R2# show pim vrf default ipv4 mstatic
Mon May 29 14:37:05.732 UTC
IP Multicast Static Routes Information
* 10.10.10.10/32 via GigabitEthernet0/0/0/0 with nexthop 192.168.1.0 and distance 10
* 10.10.10.11/32 via GigabitEthernet0/0/0/1 with nexthop 192.168.1.1 and distance 11
* 10.10.10.12/32 via GigabitEthernet0/0/0/2 with nexthop 192.168.1.2 and distance 12
* 10.10.10.13/32 via GigabitEthernet0/0/0/3 with nexthop 192.168.1.3 and distance 13
* 10.10.10.14/32 via GigabitEthernet0/0/0/4 with nexthop 192.168.1.4 and distance 14
* 10.10.10.15/32 via GigabitEthernet0/0/0/5 with nexthop 192.168.1.5 and distance 15
* 10.10.10.16/32 via GigabitEthernet0/0/0/6 with nexthop 192.168.1.6 and distance 16
* 10.10.10.17/32 via GigabitEthernet0/0/0/7 with nexthop 192.168.1.7 and distance 17
'''

    PimVrfDefaultIpv6Mstatic = '''\
RP/0/0/CPU0:R2# show pim vrf default ipv6 mstatic
Mon May 29 14:37:26.421 UTC
IP Multicast Static Routes Information
* 2001:10:10::10/128 via GigabitEthernet0/0/0/0 with nexthop 2001:11:11::10 and distance 10
* 2001:10:10::11/128 via GigabitEthernet0/0/0/1 with nexthop 2001:11:11::11 and distance 11
* 2001:10:10::12/128 via GigabitEthernet0/0/0/2 with nexthop 2001:11:11::12 and distance 12
* 2001:10:10::13/128 via GigabitEthernet0/0/0/3 with nexthop 2001:11:11::13 and distance 13
* 2001:10:10::14/128 via GigabitEthernet0/0/0/4 with nexthop 2001:11:11::14 and distance 14
* 2001:10:10::15/128 via GigabitEthernet0/0/0/5 with nexthop 2001:11:11::15 and distance 15
'''

    PimVrfDefaultIpv4InterfaceDetail = '''\
RP/0/0/CPU0:R2#show pim vrf default ipv4 interface detail
Mon May 29 14:41:28.444 UTC
PIM interfaces in VRF default
IP PIM Multicast Interface State
Flag: B - Bidir enabled, NB - Bidir disabled
P - PIM Proxy enabled, NP - PIM Proxy disabled
V - Virtual Interface
BFD State - State/Interval/Multiplier
Interface PIM Nbr Hello DR
Count Intvl Prior
Loopback0 on 1 30 1
Primary Address : 2.2.2.2
Flags : B P V
BFD : Off/150 ms/3
DR : this system
Propagation delay : 500
Override Interval : 2500
Hello Timer : 00:00:15
Neighbor Filter : -
GigabitEthernet0/0/0/0 on 1 30 1
Primary Address : 10.2.3.2
Flags : B P
BFD : Off/150 ms/3
DR : this system
Propagation delay : 500
Override Interval : 2500
Hello Timer : 00:00:01
Neighbor Filter : -
GigabitEthernet0/0/0/1 on 2 30 1
Primary Address : 10.1.2.2
Flags : NB P
BFD : Off/150 ms/3
DR : this system
Propagation delay : 500
Override Interval : 2500
Hello Timer : 00:00:07
Neighbor Filter : -
'''

    PimVrfDefaultIpv6InterfaceDetail = '''\
RP/0/0/CPU0:R2#show pim vrf default ipv6 interface detail
Mon May 29 14:41:52.972 UTC
PIM interfaces in VRF default
IP PIM Multicast Interface State
Flag: B - Bidir enabled, NB - Bidir disabled
P - PIM Proxy enabled, NP - PIM Proxy disabled
A - PIM Assert batching capable, NA - PIM Assert batching incapable
V - Virtual Interface
Interface PIM Nbr Hello DR
Count Intvl Prior
Loopback0 on 1 30 1
Primary Address : fe80::85c6:bdff:fe62:61e
Address : 2001:db8:2:2::2
Flags : B P NA V
BFD : Off/150 ms/3
DR : this system
Propagation delay : 500
Override Interval : 2500
Hello Timer : 00:00:19
Neighbor Filter : -
GigabitEthernet0/0/0/0 on 1 30 1
Primary Address : fe80::5054:ff:fee4:f669
Address : 2001:db8:2:3::2
Flags : B P NA
BFD : Off/150 ms/3
DR : this system
Propagation delay : 500
Override Interval : 2500
Hello Timer : 00:00:22
Neighbor Filter : -
GigabitEthernet0/0/0/1 on 1 30 1
Primary Address : fe80::5054:ff:feac:64b3
Address : 2001:db8:1:2::2
Flags : B P NA
BFD : Off/150 ms/3
DR : this system
Propagation delay : 500
Override Interval : 2500
Hello Timer : 00:00:02
Neighbor Filter : -
'''

    PimVrfDefaultIpv4RpfSummary = '''\
RP/0/0/CPU0:R2#show pim vrf default ipv4 rpf summary
Mon May 29 14:42:47.569 UTC
ISIS Mcast Topology Not configured
MoFRR Flow-based Not configured
MoFRR RIB Not configured
RUMP MuRIB Not enabled
PIM RPFs registered with Unicast RIB table
Default RPF Table: IPv4-Unicast-default
RIB Convergence Timeout Value: 00:30:00
RIB Convergence Time Left: 00:00:00
Multipath RPF Selection is Enabled
Table: IPv4-Unicast-default
PIM RPF Registrations = 1
RIB Table converged
'''

    PimVrfDefaultIpv6RpfSummary = '''\
RP/0/0/CPU0:R2#show pim vrf default ipv6 rpf summary
Mon May 29 14:42:53.538 UTC
ISIS Mcast Topology Not configured
MoFRR Flow-based Not configured
MoFRR RIB Not configured
RUMP MuRIB Not enabled
PIM RPFs registered with Unicast RIB table
Default RPF Table: IPv6-Unicast-default
RIB Convergence Timeout Value: 00:30:00
RIB Convergence Time Left: 00:00:00
Multipath RPF Selection is Enabled
Table: IPv6-Unicast-default
PIM RPF Registrations = 0
RIB Table converged
'''

    ############################################
    # INFO - VRF: VRF1
    ############################################

    PimVrfVRF1Ipv4Mstatic = '''\
RP/0/0/CPU0:R2# show pim vrf VRF1 ipv4 mstatic
Mon May 29 14:37:05.732 UTC
IP Multicast Static Routes Information
* 20.10.10.10/32 via GigabitEthernet1/0/0/0 with nexthop 192.168.1.0 and distance 10
* 20.10.10.11/32 via GigabitEthernet1/0/0/1 with nexthop 192.168.1.1 and distance 11
* 20.10.10.12/32 via GigabitEthernet1/0/0/2 with nexthop 192.168.1.2 and distance 12
* 20.10.10.13/32 via GigabitEthernet1/0/0/3 with nexthop 192.168.1.3 and distance 13
* 20.10.10.14/32 via GigabitEthernet1/0/0/4 with nexthop 192.168.1.4 and distance 14
* 20.10.10.15/32 via GigabitEthernet1/0/0/5 with nexthop 192.168.1.5 and distance 15
* 20.10.10.16/32 via GigabitEthernet1/0/0/6 with nexthop 192.168.1.6 and distance 16
* 20.10.10.17/32 via GigabitEthernet1/0/0/7 with nexthop 192.168.1.7 and distance 17
'''

    PimVrfVRF1Ipv6Mstatic = '''\
RP/0/0/CPU0:R2# show pim vrf VRF1 ipv6 mstatic
Mon May 29 14:37:26.421 UTC
IP Multicast Static Routes Information
* 3001:10:10::10/128 via GigabitEthernet1/0/0/0 with nexthop 2001:11:11::10 and distance 10
* 3001:10:10::11/128 via GigabitEthernet1/0/0/1 with nexthop 2001:11:11::11 and distance 11
* 3001:10:10::12/128 via GigabitEthernet1/0/0/2 with nexthop 2001:11:11::12 and distance 12
* 3001:10:10::13/128 via GigabitEthernet1/0/0/3 with nexthop 2001:11:11::13 and distance 13
* 3001:10:10::14/128 via GigabitEthernet1/0/0/4 with nexthop 2001:11:11::14 and distance 14
* 3001:10:10::15/128 via GigabitEthernet1/0/0/5 with nexthop 2001:11:11::15 and distance 15
'''

    PimVrfVRF1Ipv4InterfaceDetail = '''\
RP/0/0/CPU0:R2#show pim vrf VRF1 ipv4 interface detail
Mon May 29 14:41:28.444 UTC
PIM interfaces in VRF VRF1
IP PIM Multicast Interface State
Flag: B - Bidir enabled, NB - Bidir disabled
P - PIM Proxy enabled, NP - PIM Proxy disabled
V - Virtual Interface
BFD State - State/Interval/Multiplier
Interface PIM Nbr Hello DR
Count Intvl Prior
Loopback0 on 1 30 1
Primary Address : 2.2.2.2
Flags : B P V
BFD : Off/150 ms/3
DR : this system
Propagation delay : 500
Override Interval : 2500
Hello Timer : 00:00:15
Neighbor Filter : -
GigabitEthernet0/0/0/0 on 1 30 1
Primary Address : 10.2.3.2
Flags : B P
BFD : Off/150 ms/3
DR : this system
Propagation delay : 500
Override Interval : 2500
Hello Timer : 00:00:01
Neighbor Filter : -
GigabitEthernet0/0/0/1 on 2 30 1
Primary Address : 10.1.2.2
Flags : NB P
BFD : Off/150 ms/3
DR : this system
Propagation delay : 500
Override Interval : 2500
Hello Timer : 00:00:07
Neighbor Filter : -
'''

    PimVrfVRF1Ipv6InterfaceDetail = '''\
RP/0/0/CPU0:R2#show pim vrf VRF1 ipv6 interface detail
Mon May 29 14:41:52.972 UTC
PIM interfaces in VRF VRF1
IP PIM Multicast Interface State
Flag: B - Bidir enabled, NB - Bidir disabled
P - PIM Proxy enabled, NP - PIM Proxy disabled
A - PIM Assert batching capable, NA - PIM Assert batching incapable
V - Virtual Interface
Interface PIM Nbr Hello DR
Count Intvl Prior
Loopback0 on 1 30 1
Primary Address : fe80::85c6:bdff:fe62:61e
Address : 2001:db8:2:2::2
Flags : B P NA V
BFD : Off/150 ms/3
DR : this system
Propagation delay : 500
Override Interval : 2500
Hello Timer : 00:00:19
Neighbor Filter : -
GigabitEthernet0/0/0/0 on 1 30 1
Primary Address : fe80::5054:ff:fee4:f669
Address : 2001:db8:2:3::2
Flags : B P NA
BFD : Off/150 ms/3
DR : this system
Propagation delay : 500
Override Interval : 2500
Hello Timer : 00:00:22
Neighbor Filter : -
GigabitEthernet0/0/0/1 on 1 30 1
Primary Address : fe80::5054:ff:feac:64b3
Address : 2001:db8:1:2::2
Flags : B P NA
BFD : Off/150 ms/3
DR : this system
Propagation delay : 500
Override Interval : 2500
Hello Timer : 00:00:02
Neighbor Filter : -
'''

    # NOTE(review): the echoed command below reads 'show pim VRF1 default
    # ipv4 rpf summary' -- presumably meant 'show pim vrf VRF1 ipv4 rpf
    # summary'. Harmless if the parser skips the echoed command line, but
    # confirm before relying on it.
    PimVrfVRF1Ipv4RpfSummary = '''\
RP/0/0/CPU0:R2#show pim VRF1 default ipv4 rpf summary
Mon May 29 14:42:47.569 UTC
ISIS Mcast Topology Not configured
MoFRR Flow-based Not configured
MoFRR RIB Not configured
RUMP MuRIB Not enabled
PIM RPFs registered with Unicast RIB table
Default RPF Table: IPv4-Unicast-default
RIB Convergence Timeout Value: 00:30:00
RIB Convergence Time Left: 00:00:00
Multipath RPF Selection is Enabled
Table: IPv4-Unicast-default
PIM RPF Registrations = 1
RIB Table converged
'''

    PimVrfVRF1Ipv6RpfSummary = '''\
RP/0/0/CPU0:R2#show pim vrf VRF1 ipv6 rpf summary
Mon May 29 14:42:53.538 UTC
ISIS Mcast Topology Not configured
MoFRR Flow-based Not configured
MoFRR RIB Not configured
RUMP MuRIB Not enabled
PIM RPFs registered with Unicast RIB table
Default RPF Table: IPv6-Unicast-default
RIB Convergence Timeout Value: 00:30:00
RIB Convergence Time Left: 00:00:00
Multipath RPF Selection is Enabled
Table: IPv6-Unicast-default
PIM RPF Registrations = 0
RIB Table converged
'''

    ############################################
    # TABLE - VRF: default
    ############################################

    # Raw 'show mrib ... route' captures feeding the 'table' side.
    MribVrfDefaultIpv4Route = '''\
RP/0/1/CPU0:rtr1#show mrib vrf default ipv4 route
Mon Nov 2 15:26:01.015 PST
IP Multicast Routing Information Base
Entry flags: L - Domain-Local Source, E - External Source to the Domain,
C - Directly-Connected Check, S - Signal, IA - Inherit Accept,
IF - Inherit From, D - Drop, ME - MDT Encap, EID - Encap ID,
MD - MDT Decap, MT - MDT Threshold Crossed, MH - MDT interface handle
CD - Conditional Decap, MPLS - MPLS Decap, EX - Extranet
MoFE - MoFRR Enabled, MoFS - MoFRR State, MoFP - MoFRR Primary
MoFB - MoFRR Backup, RPFID - RPF ID Set, X - VXLAN
Interface flags: F - Forward, A - Accept, IC - Internal Copy,
NS - Negate Signal, DP - Don't Preserve, SP - Signal Present,
II - Internal Interest, ID - Internal Disinterest, LI - Local Interest,
LD - Local Disinterest, DI - Decapsulation Interface
EI - Encapsulation Interface, MI - MDT Interface, LVIF - MPLS Encap,
EX - Extranet, A2 - Secondary Accept, MT - MDT Threshold Crossed,
MA - Data MDT Assigned, LMI - mLDP MDT Interface, TMI - P2MP-TE MDT Interface
IRMI - IR MDT Interface
(*,224.0.0.0/4) RPF nbr: 0.0.0.0 Flags: C RPF P
Up: 00:00:58
(*,224.0.0.0/24) Flags: D P
Up: 00:00:58
(*,224.0.1.39) Flags: S P
Up: 00:00:58
(*,227.1.1.1) RPF nbr: 0.0.0.0 Flags: C RPF MD MH CD
MVPN TID: 0xe000001f
MVPN Remote TID: 0x0
MVPN Payload: IPv4
MDT IFH: 0x803380
Up: 00:00:54
Outgoing Interface List
Loopback0 Flags: F NS, Up: 00:00:54
(192.168.0.12,227.1.1.1) RPF nbr: 192.168.0.12 Flags: RPF ME MH
MVPN TID: 0xe000001f
MVPN Remote TID: 0x0
MVPN Payload: IPv4
MDT IFH: 0x803380
Up: 00:00:54
Incoming Interface List
Loopback0 Flags: F NS, Up: 00:00:58
Outgoing Interface List
Loopback0 Flags: F A, Up: 00:00:54
(*,232.0.0.0/8) Flags: D P
Up: 00:00:58
(*,236.5.5.5) RPF nbr: 0.0.0.0 Flags: C RPF MD MH CD
MVPN TID: 0xe0000018
MVPN Remote TID: 0xe0800018
MVPN Payload: IPv4 IPv6
MDT IFH: 0x803480
Up: 00:00:54
Outgoing Interface List
Loopback0 Flags: F NS, Up: 00:00:54
(192.168.0.12,236.5.5.5) RPF nbr: 192.168.0.12 Flags: RPF ME MH
MVPN TID: 0xe0000018
MVPN Remote TID: 0xe0800018
MVPN Payload: IPv4 IPv6
MDT IFH: 0x803480
Up: 00:00:54
Incoming Interface List
Loopback0 Flags: F A, Up: 00:00:54
Outgoing Interface List
Loopback0 Flags: F A, Up: 00:00:54
(192.168.0.22,236.5.5.5) RPF nbr: 11.0.1.22 Flags: C RPF MD MH CD
MVPN TID: 0xe0000018
MVPN Remote TID: 0xe0800018
MVPN Payload: IPv4 IPv6
MDT IFH: 0x803480
Up: 00:00:13
Outgoing Interface List
Loopback0 Flags: F NS, Up: 00:00:13
GigabitEthernet0/1/0/1 Flags: NS, Up: 00:00:01
'''

    MribVrfDefaultIpv6Route = '''\
RP/0/1/CPU0:rtr1#show mrib vrf default ipv6 route
Mon Nov 2 15:26:01.015 PST
IP Multicast Routing Information Base
Entry flags: L - Domain-Local Source, E - External Source to the Domain,
C - Directly-Connected Check, S - Signal, IA - Inherit Accept,
IF - Inherit From, D - Drop, ME - MDT Encap, EID - Encap ID,
MD - MDT Decap, MT - MDT Threshold Crossed, MH - MDT interface handle
CD - Conditional Decap, MPLS - MPLS Decap, EX - Extranet
MoFE - MoFRR Enabled, MoFS - MoFRR State, MoFP - MoFRR Primary
MoFB - MoFRR Backup, RPFID - RPF ID Set, X - VXLAN
Interface flags: F - Forward, A - Accept, IC - Internal Copy,
NS - Negate Signal, DP - Don't Preserve, SP - Signal Present,
II - Internal Interest, ID - Internal Disinterest, LI - Local Interest,
LD - Local Disinterest, DI - Decapsulation Interface
EI - Encapsulation Interface, MI - MDT Interface, LVIF - MPLS Encap,
EX - Extranet, A2 - Secondary Accept, MT - MDT Threshold Crossed,
MA - Data MDT Assigned, LMI - mLDP MDT Interface, TMI - P2MP-TE MDT Interface
IRMI - IR MDT Interface
(*,ff00::/8)
RPF nbr: 150::150:150:150:150 Flags: L C RPF P
Up: 00:04:45
Outgoing Interface List
Decaps6tunnel0 Flags: NS DI, Up: 00:04:40
(*,ff00::/15)
Flags: D P
Up: 00:04:45
(*,ff02::/16)
Flags: D P
Up: 00:04:45
(*,ff10::/15)
Flags: D P
Up: 00:04:45
(*,ff12::/16)
Flags: D P
Up: 00:04:45
(1::1:1:1:2,ff15::1:1)
RPF nbr: 1::1:1:1:2 Flags: L RPF MT
MT Slot: 0/2/CPU0
Up: 00:02:53
Incoming Interface List
GigabitEthernet150/0/0/6 Flags: A, Up: 00:02:53
Outgoing Interface List
mdtvpn1 Flags: F NS MI MT MA, Up: 00:02:53
(4::4:4:4:5,ff15::2:1)
RPF nbr: ::ffff:200.200.200.200 Flags: L RPF
Up: 00:03:59
Incoming Interface List
mdtvpn1 Flags: A MI, Up: 00:03:35
Outgoing Interface List
GigabitEthernet150/0/0/6 Flags: F NS, Up: 00:03:59
(*,ff20::/15)
Flags: D P
Up: 00:04:45
(*,ff22::/16)
Flags: D P
Up: 00:04:45
'''

    ############################################
    # TABLE - VRF: VRF1
    ############################################

    MribVrfVRF1Ipv4Route = '''\
RP/0/1/CPU0:rtr1#show mrib vrf VRF1 ipv4 route
Mon Nov 2 15:26:01.015 PST
IP Multicast Routing Information Base
Entry flags: L - Domain-Local Source, E - External Source to the Domain,
C - Directly-Connected Check, S - Signal, IA - Inherit Accept,
IF - Inherit From, D - Drop, ME - MDT Encap, EID - Encap ID,
MD - MDT Decap, MT - MDT Threshold Crossed, MH - MDT interface handle
CD - Conditional Decap, MPLS - MPLS Decap, EX - Extranet
MoFE - MoFRR Enabled, MoFS - MoFRR State, MoFP - MoFRR Primary
MoFB - MoFRR Backup, RPFID - RPF ID Set, X - VXLAN
Interface flags: F - Forward, A - Accept, IC - Internal Copy,
NS - Negate Signal, DP - Don't Preserve, SP - Signal Present,
II - Internal Interest, ID - Internal Disinterest, LI - Local Interest,
LD - Local Disinterest, DI - Decapsulation Interface
EI - Encapsulation Interface, MI - MDT Interface, LVIF - MPLS Encap,
EX - Extranet, A2 - Secondary Accept, MT - MDT Threshold Crossed,
MA - Data MDT Assigned, LMI - mLDP MDT Interface, TMI - P2MP-TE MDT Interface
IRMI - IR MDT Interface
(*,234.0.0.0/4) RPF nbr: 0.0.0.1 Flags: MD RPF P
Up: 00:01:28
(*,124.0.0.0/32) Flags: P D
Up: 00:01:38
(*,124.0.1.40) Flags: S P
Up: 00:00:46
(172.150.0.15,217.1.1.1) RPF nbr: 192.168.0.12 Flags: RPF ME MH
MVPN TID: 0xe000001f
MVPN Remote TID: 0x0
MVPN Payload: IPv4
MDT IFH: 0x803380
Up: 00:00:54
Incoming Interface List
GigabitEthernet0/0/0/1 Flags: F NS, Up: 00:01:38
Outgoing Interface List
GigabitEthernet0/0/0/2 Flags: F A, Up: 00:01:24
'''

    MribVrfVRF1Ipv6Route = '''\
RP/0/1/CPU0:rtr1#show mrib vrf VRF1 ipv6 route
Mon Nov 2 15:26:01.015 PST
IP Multicast Routing Information Base
Entry flags: L - Domain-Local Source, E - External Source to the Domain,
C - Directly-Connected Check, S - Signal, IA - Inherit Accept,
IF - Inherit From, D - Drop, ME - MDT Encap, EID - Encap ID,
MD - MDT Decap, MT - MDT Threshold Crossed, MH - MDT interface handle
CD - Conditional Decap, MPLS - MPLS Decap, EX - Extranet
MoFE - MoFRR Enabled, MoFS - MoFRR State, MoFP - MoFRR Primary
MoFB - MoFRR Backup, RPFID - RPF ID Set, X - VXLAN
Interface flags: F - Forward, A - Accept, IC - Internal Copy,
NS - Negate Signal, DP - Don't Preserve, SP - Signal Present,
II - Internal Interest, ID - Internal Disinterest, LI - Local Interest,
LD - Local Disinterest, DI - Decapsulation Interface
EI - Encapsulation Interface, MI - MDT Interface, LVIF - MPLS Encap,
EX - Extranet, A2 - Secondary Accept, MT - MDT Threshold Crossed,
MA - Data MDT Assigned, LMI - mLDP MDT Interface, TMI - P2MP-TE MDT Interface
IRMI - IR MDT Interface
(*,ff70::/12)
RPF nbr: :: Flags: C RPF P
Up: 00:04:45
(*,ff70::/15)
Flags: D P
Up: 00:04:45
(*,ff72::/16)
Flags: D P
Up: 00:04:45
(*,ff80::/15)
Flags: D P
Up: 00:04:45
(*,ff82::/16)
Flags: D P
Up: 00:04:45
(*,ff90::/15)
Flags: D P
Up: 00:04:45
'''

    # Expected Ops 'info' structure: static mroutes per VRF / address family,
    # mirroring the Pim*Mstatic captures above.
    McastInfo = {
        'vrf':
        {'VRF1':
        {'address_family':
        {'ipv4':
        {'enable': True,
        'mroute':
        {'20.10.10.10/32':
        {'path':
        {'192.168.1.0 GigabitEthernet1/0/0/0 10':
        {'admin_distance': 10,
        'interface_name': 'GigabitEthernet1/0/0/0',
        'neighbor_address': '192.168.1.0'}}},
        '20.10.10.11/32':
        {'path':
        {'192.168.1.1 GigabitEthernet1/0/0/1 11':
        {'admin_distance': 11,
        'interface_name': 'GigabitEthernet1/0/0/1',
        'neighbor_address': '192.168.1.1'}}},
        '20.10.10.12/32':
        {'path':
        {'192.168.1.2 GigabitEthernet1/0/0/2 12':
        {'admin_distance': 12,
        'interface_name': 'GigabitEthernet1/0/0/2',
        'neighbor_address': '192.168.1.2'}}},
        '20.10.10.13/32':
        {'path':
        {'192.168.1.3 GigabitEthernet1/0/0/3 13':
        {'admin_distance': 13,
        'interface_name': 'GigabitEthernet1/0/0/3',
        'neighbor_address': '192.168.1.3'}}},
        '20.10.10.14/32':
        {'path':
        {'192.168.1.4 GigabitEthernet1/0/0/4 14':
        {'admin_distance': 14,
        'interface_name': 'GigabitEthernet1/0/0/4',
        'neighbor_address': '192.168.1.4'}}},
        '20.10.10.15/32':
        {'path':
        {'192.168.1.5 GigabitEthernet1/0/0/5 15':
        {'admin_distance': 15,
        'interface_name': 'GigabitEthernet1/0/0/5',
        'neighbor_address': '192.168.1.5'}}},
        '20.10.10.16/32':
        {'path':
        {'192.168.1.6 GigabitEthernet1/0/0/6 16':
        {'admin_distance': 16,
        'interface_name': 'GigabitEthernet1/0/0/6',
        'neighbor_address': '192.168.1.6'}}},
        '20.10.10.17/32':
        {'path':
        {'192.168.1.7 GigabitEthernet1/0/0/7 17':
        {'admin_distance': 17,
        'interface_name': 'GigabitEthernet1/0/0/7',
        'neighbor_address': '192.168.1.7'}}}},
        'multipath': True},
        'ipv6':
        {'enable': True,
        'mroute':
        {'3001:10:10::10/128':
        {'path':
        {'2001:11:11::10 GigabitEthernet1/0/0/0 10':
        {'admin_distance': 10,
        'interface_name': 'GigabitEthernet1/0/0/0',
        'neighbor_address': '2001:11:11::10'}}},
        '3001:10:10::11/128':
        {'path':
        {'2001:11:11::11 GigabitEthernet1/0/0/1 11':
        {'admin_distance': 11,
        'interface_name': 'GigabitEthernet1/0/0/1',
        'neighbor_address': '2001:11:11::11'}}},
        '3001:10:10::12/128':
        {'path':
        {'2001:11:11::12 GigabitEthernet1/0/0/2 12':
        {'admin_distance': 12,
        'interface_name': 'GigabitEthernet1/0/0/2',
        'neighbor_address': '2001:11:11::12'}}},
        '3001:10:10::13/128':
        {'path':
        {'2001:11:11::13 GigabitEthernet1/0/0/3 13':
        {'admin_distance': 13,
        'interface_name': 'GigabitEthernet1/0/0/3',
        'neighbor_address': '2001:11:11::13'}}},
        '3001:10:10::14/128':
        {'path':
        {'2001:11:11::14 GigabitEthernet1/0/0/4 14':
        {'admin_distance': 14,
        'interface_name': 'GigabitEthernet1/0/0/4',
        'neighbor_address': '2001:11:11::14'}}},
        '3001:10:10::15/128':
        {'path':
        {'2001:11:11::15 GigabitEthernet1/0/0/5 15':
        {'admin_distance': 15,
        'interface_name': 'GigabitEthernet1/0/0/5',
        'neighbor_address': '2001:11:11::15'}}}},
        'multipath': True}}},
        'default':
        {'address_family':
        {'ipv4':
        {'enable': True,
        'mroute':
        {'10.10.10.10/32':
        {'path':
        {'192.168.1.0 GigabitEthernet0/0/0/0 10':
        {'admin_distance': 10,
        'interface_name': 'GigabitEthernet0/0/0/0',
        'neighbor_address': '192.168.1.0'}}},
        '10.10.10.11/32':
        {'path':
        {'192.168.1.1 GigabitEthernet0/0/0/1 11':
        {'admin_distance': 11,
        'interface_name': 'GigabitEthernet0/0/0/1',
        'neighbor_address': '192.168.1.1'}}},
        '10.10.10.12/32':
        {'path':
        {'192.168.1.2 GigabitEthernet0/0/0/2 12':
        {'admin_distance': 12,
        'interface_name': 'GigabitEthernet0/0/0/2',
        'neighbor_address': '192.168.1.2'}}},
        '10.10.10.13/32':
        {'path':
        {'192.168.1.3 GigabitEthernet0/0/0/3 13':
        {'admin_distance': 13,
        'interface_name': 'GigabitEthernet0/0/0/3',
        'neighbor_address': '192.168.1.3'}}},
        '10.10.10.14/32':
        {'path':
        {'192.168.1.4 GigabitEthernet0/0/0/4 14':
        {'admin_distance': 14,
        'interface_name': 'GigabitEthernet0/0/0/4',
        'neighbor_address': '192.168.1.4'}}},
        '10.10.10.15/32':
        {'path':
        {'192.168.1.5 GigabitEthernet0/0/0/5 15':
        {'admin_distance': 15,
        'interface_name': 'GigabitEthernet0/0/0/5',
        'neighbor_address': '192.168.1.5'}}},
        '10.10.10.16/32':
        {'path':
        {'192.168.1.6 GigabitEthernet0/0/0/6 16':
        {'admin_distance': 16,
        'interface_name': 'GigabitEthernet0/0/0/6',
        'neighbor_address': '192.168.1.6'}}},
        '10.10.10.17/32':
        {'path':
        {'192.168.1.7 GigabitEthernet0/0/0/7 17':
        {'admin_distance': 17,
        'interface_name': 'GigabitEthernet0/0/0/7',
        'neighbor_address': '192.168.1.7'}}}},
        'multipath': True},
        'ipv6':
        {'enable': True,
        'mroute':
        {'2001:10:10::10/128':
        {'path':
        {'2001:11:11::10 GigabitEthernet0/0/0/0 10':
        {'admin_distance': 10,
        'interface_name': 'GigabitEthernet0/0/0/0',
        'neighbor_address': '2001:11:11::10'}}},
        '2001:10:10::11/128':
        {'path':
        {'2001:11:11::11 GigabitEthernet0/0/0/1 11':
        {'admin_distance': 11,
        'interface_name': 'GigabitEthernet0/0/0/1',
        'neighbor_address': '2001:11:11::11'}}},
        '2001:10:10::12/128':
        {'path':
        {'2001:11:11::12 GigabitEthernet0/0/0/2 12':
        {'admin_distance': 12,
        'interface_name': 'GigabitEthernet0/0/0/2',
        'neighbor_address': '2001:11:11::12'}}},
        '2001:10:10::13/128':
        {'path':
        {'2001:11:11::13 GigabitEthernet0/0/0/3 13':
        {'admin_distance': 13,
        'interface_name': 'GigabitEthernet0/0/0/3',
        'neighbor_address': '2001:11:11::13'}}},
        '2001:10:10::14/128':
        {'path':
        {'2001:11:11::14 GigabitEthernet0/0/0/4 14':
        {'admin_distance': 14,
        'interface_name': 'GigabitEthernet0/0/0/4',
        'neighbor_address': '2001:11:11::14'}}},
        '2001:10:10::15/128':
        {'path':
        {'2001:11:11::15 GigabitEthernet0/0/0/5 15':
        {'admin_distance': 15,
        'interface_name': 'GigabitEthernet0/0/0/5',
        'neighbor_address': '2001:11:11::15'}}}},
        'multipath': True}}}}}

    # Expected Ops 'table' structure: multicast groups per VRF / address
    # family, mirroring the Mrib*Route captures above.
    McastTable = {
        'vrf':
        {'VRF1':
        {'address_family':
        {'ipv4':
        {'multicast_group':
        {'124.0.0.0/32':
        {'source_address':
        {'*':
        {'flags': 'P D',
        'uptime': '00:01:38'}}},
        '124.0.1.40':
        {'source_address':
        {'*':
        {'flags': 'S P',
        'uptime': '00:00:46'}}},
        '217.1.1.1':
        {'source_address':
        {'172.150.0.15':
        {'flags': 'RPF ME MH',
        'incoming_interface_list':
        {'GigabitEthernet0/0/0/1':
        {'rpf_nbr': '192.168.0.12'}},
        'outgoing_interface_list':
        {'GigabitEthernet0/0/0/2':
        {'flags': 'F A',
        'uptime': '00:01:24'}},
        'uptime': '00:00:54'}}},
        '234.0.0.0/4':
        {'source_address':
        {'*':
        {'flags': 'MD RPF P',
        'uptime': '00:01:28'}}}}},
        'ipv6':
        {'multicast_group':
        {'ff70::/12':
        {'source_address':
        {'*':
        {'flags': 'C RPF P',
        'uptime': '00:04:45'}}},
        'ff70::/15':
        {'source_address':
        {'*':
        {'flags': 'D P',
        'uptime': '00:04:45'}}},
        'ff72::/16':
        {'source_address':
        {'*':
        {'flags': 'D P',
        'uptime': '00:04:45'}}},
        'ff80::/15':
        {'source_address':
        {'*':
        {'flags': 'D P',
        'uptime': '00:04:45'}}},
        'ff82::/16':
        {'source_address':
        {'*':
        {'flags': 'D P',
        'uptime': '00:04:45'}}},
        'ff90::/15':
        {'source_address':
        {'*':
        {'flags': 'D P',
        'uptime': '00:04:45'}}}}}}},
        'default':
        {'address_family':
        {'ipv4':
        {'multicast_group':
        {'224.0.0.0/24':
        {'source_address':
        {'*':
        {'flags': 'D P',
        'uptime': '00:00:58'}}},
        '224.0.0.0/4':
        {'source_address':
        {'*':
        {'flags': 'C RPF P',
        'uptime': '00:00:58'}}},
        '224.0.1.39':
        {'source_address':
        {'*':
        {'flags': 'S P',
        'uptime': '00:00:58'}}},
        '227.1.1.1':
        {'source_address':
        {'*':
        {'flags': 'C RPF MD MH CD',
        'outgoing_interface_list':
        {'Loopback0':
        {'flags': 'F NS',
        'uptime': '00:00:54'}},
        'uptime': '00:00:54'},
        '192.168.0.12':
        {'flags': 'RPF ME MH',
        'incoming_interface_list':
        {'Loopback0':
        {'rpf_nbr': '192.168.0.12'}},
        'outgoing_interface_list':
        {'Loopback0':
        {'flags': 'F A',
        'uptime': '00:00:54'}},
        'uptime': '00:00:54'}}},
        '232.0.0.0/8':
        {'source_address':
        {'*':
        {'flags': 'D P',
        'uptime': '00:00:58'}}},
        '236.5.5.5':
        {'source_address':
        {'*':
        {'flags': 'C RPF MD MH CD',
        'outgoing_interface_list':
        {'Loopback0':
        {'flags': 'F NS',
        'uptime': '00:00:54'}},
        'uptime': '00:00:54'},
        '192.168.0.12':
        {'flags': 'RPF ME MH',
        'incoming_interface_list':
        {'Loopback0':
        {'rpf_nbr': '192.168.0.12'}},
        'outgoing_interface_list':
        {'Loopback0':
        {'flags': 'F A',
        'uptime': '00:00:54'}},
        'uptime': '00:00:54'},
        '192.168.0.22':
        {'flags': 'C RPF MD MH CD',
        'outgoing_interface_list':
        {'GigabitEthernet0/1/0/1':
        {'flags': 'NS',
        'uptime': '00:00:01'},
        'Loopback0': {'flags': 'F NS',
        'uptime': '00:00:13'}},
        'uptime': '00:00:13'}}}}},
        'ipv6':
        {'multicast_group':
        {'ff00::/15':
        {'source_address':
        {'*':
        {'flags': 'D P',
        'uptime': '00:04:45'}}},
        'ff00::/8':
        {'source_address':
        {'*':
        {'flags': 'L C RPF P',
        'outgoing_interface_list':
        {'Decaps6tunnel0':
        {'flags': 'NS DI',
        'uptime': '00:04:40'}},
        'uptime': '00:04:45'}}},
        'ff02::/16':
        {'source_address':
        {'*':
        {'flags': 'D P',
        'uptime': '00:04:45'}}},
        'ff10::/15':
        {'source_address':
        {'*':
        {'flags': 'D P',
        'uptime': '00:04:45'}}},
        'ff12::/16':
        {'source_address':
        {'*':
        {'flags': 'D P',
        'uptime': '00:04:45'}}},
        'ff15::1:1':
        {'source_address':
        {'1::1:1:1:2':
        {'flags': 'L RPF MT',
        'incoming_interface_list':
        {'GigabitEthernet150/0/0/6':
        {'rpf_nbr': '1::1:1:1:2'}},
        'outgoing_interface_list':
        {'mdtvpn1':
        {'flags': 'F NS MI MT MA',
        'uptime': '00:02:53'}},
        'uptime': '00:02:53'}}},
        'ff15::2:1':
        {'source_address':
        {'4::4:4:4:5':
        {'flags': 'L RPF',
        'incoming_interface_list':
        {'mdtvpn1':
        {'rpf_nbr': '::ffff:200.200.200.200'}},
        'outgoing_interface_list':
        {'GigabitEthernet150/0/0/6':
        {'flags': 'F NS',
        'uptime': '00:03:59'}},
        'uptime': '00:03:59'}}},
        'ff20::/15':
        {'source_address':
        {'*':
        {'flags': 'D P',
        'uptime': '00:04:45'}}},
        'ff22::/16':
        {'source_address':
        {'*':
        {'flags': 'D P',
        'uptime': '00:04:45'}}}}}}}}}
| 47.745972
| 101
| 0.370523
| 4,684
| 50,372
| 3.943638
| 0.075576
| 0.017432
| 0.054569
| 0.014725
| 0.937419
| 0.901635
| 0.883445
| 0.829418
| 0.811932
| 0.742583
| 0
| 0.166139
| 0.523108
| 50,372
| 1,054
| 102
| 47.791271
| 0.602822
| 0.003137
| 0
| 0.747872
| 0
| 0.038298
| 0.582728
| 0.061705
| 0
| 0
| 0.002949
| 0
| 0.002128
| 1
| 0
| false
| 0
| 0.012766
| 0
| 0.034043
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c61bb4ae01854263b5b6efbcb2167a9a1e869d17
| 18,042
|
py
|
Python
|
ChessPieces.py
|
crockct/Colleens_Chess
|
8d5002816ee09b8e485e48b0249c86ea8758c894
|
[
"MIT"
] | 1
|
2019-02-07T19:16:39.000Z
|
2019-02-07T19:16:39.000Z
|
ChessPieces.py
|
crockct/Colleens_Chess
|
8d5002816ee09b8e485e48b0249c86ea8758c894
|
[
"MIT"
] | null | null | null |
ChessPieces.py
|
crockct/Colleens_Chess
|
8d5002816ee09b8e485e48b0249c86ea8758c894
|
[
"MIT"
] | null | null | null |
from pygame import sprite, image
# Maps a column letter ("a".."h") to its zero-based column number.
colNumber = {letter: index for index, letter in enumerate("abcdefgh")}
#gives the character, as a string, corresponding to the column num
def colStr(num):
    """Return the column letter ('a'..'h') for a column number.

    Accepts an int or a numeric string.  Values outside 0-7 wrap modulo 8;
    Python's % always yields a result in 0..7, so negative numbers are
    handled too (e.g. -1 -> 'h'), matching the original if/elif chain.
    """
    # Index into the column alphabet instead of an 8-branch if/elif chain.
    return "abcdefgh"[int(num) % 8]
class Piece(sprite.Sprite):
    """Base sprite for a chess piece: tracks color, board square, and rect.

    NOTE(review): __init__ calls self.setPic(), which Piece itself does not
    define -- every concrete subclass (e.g. Rook) must provide it.
    """
    def __init__(self, color, board, col, row):
        # col (letter) and row (number) name the square; board.squareDic
        # maps the concatenated "<col><row>" string to a square surface.
        sprite.Sprite.__init__(self)
        self.color = color
        self.col = col
        self.row = row
        self.rect = board.squareDic[self.col+self.row].get_rect()
        # NOTE(review): these two lines re-fetch get_rect() of the same
        # square and copy its own center onto self.rect -- they look
        # redundant with the assignment above; confirm before removing.
        self.rect.centerx = board.squareDic[self.col+self.row].get_rect().centerx
        self.rect.centery = board.squareDic[self.col+self.row].get_rect().centery
        self.onBoard = True
        # Register this piece on its starting square.
        board.squareDic[str(col)+str(row)].piece = self
        self.hasMoved = False #used for Castling
        self.setPic()
    # Requires Capture or not isOccupied
    def moveTo(self, board, col, row):
        # Place the piece on the destination square and update its state.
        # NOTE(review): the vacated square's .piece reference is NOT cleared
        # here -- the caller appears to be responsible for that; confirm.
        board.squareDic[str(col)+str(row)].piece = self
        self.col = col
        self.row = row
        self.hasMoved = True  # once moved, castling with this piece is illegal
        self.rect = board.squareDic[self.col+self.row].get_rect()
class Rook(Piece): #Rook is a subclass of Piece and inherits its attributes and methods
    def setPic(self):
        """Load this rook's sprite image based on its color."""
        if (self.color == 'black'):
            # Raw strings keep the Windows path bytes identical while staying
            # valid under Python 3 ("\U..." would otherwise be an escape).
            self.image = image.load(r"C:\Users\Colleen\Pictures\usedInSomething\BlackRook.png").convert_alpha()
            # convert_alpha preserves per pixel transparency
        elif (self.color == 'white'):
            self.image = image.load(r"C:\Users\Colleen\Pictures\usedInSomething\whiteRook.png").convert_alpha()
        else:
            print("Color issue with Rook")

    def __str__(self):
        return self.color + " rook"

    #returns (True, "") or (False, string error message)
    #requires that currentRow and destRow are digits given as strings
    def validMove(self, board, currentCol, currentRow, destCol, destRow): #row is y value
        """Check whether moving from (currentCol, currentRow) to
        (destCol, destRow) is a legal rook move on board.

        Returns a (bool, message) tuple; message explains a False result.
        """
        #input is string values; the (X, Y) values below are numerical ints
        if board.isOccupied(destCol, destRow) and board.squareDic[destCol + destRow].piece.color == self.color:
            return False, "Can't capture your own piece"
        currentX = colNumber[currentCol]
        currentY = int(currentRow)
        destX = colNumber[destCol]
        destY = int(destRow)
        if (currentX == destX and currentY == destY): #doesn't actually move
            return (False, "same dest and current loc")
        elif destX > 7 or destX < 0 or destY > 7 or destY < 0: #goes off grid
            # BUGFIX: the last test used to re-check destX instead of destY.
            return (False, "off grid")
        elif currentX == destX:
            # Vertical move: scan the squares strictly between current and dest.
            if abs(currentY - destY) == 1: #no spaces in between
                return (True, "")
            step = 1 if destY > currentY else -1
            tempY = currentY + step
            while tempY != destY:
                # BUGFIX: the downward branch used to pass the row as an int;
                # every other isOccupied call passes it as a string.
                if board.isOccupied(destCol, str(tempY)): # col, row
                    return (False, "Rook cannot leap over other pieces")
                tempY += step
            return (True, "")
        elif currentY == destY:
            # Horizontal move.
            # BUGFIX: the distance must be measured along X; the original used
            # abs(currentY - destY), which is always 0 in this branch, so the
            # blocking scan never ran and rooks could leap horizontally.
            numToCheck = abs(currentX - destX)
            if numToCheck == 1:
                return (True, "") #no spaces in between
            # Walk from the leftmost of the two columns toward the other,
            # checking only the squares strictly in between.
            left = min(currentX, destX)
            j = 1
            while j < numToCheck:
                if board.isOccupied(colStr(left + j), str(destY)):
                    return (False, "This piece cannot leap over other pieces.")
                j += 1
            return (True, "")
        else:
            return (False, "not valid rook move")
class Knight(Piece):
    def setPic(self):
        """Load this knight's sprite image based on its color."""
        if (self.color == 'black'):
            # Raw strings keep the Windows path bytes identical while staying
            # valid under Python 3.
            self.image = image.load(r"C:\Users\Colleen\Pictures\usedInSomething\BlackKnight.png").convert_alpha()
            # convert_alpha preserves per pixel transparency
        elif (self.color == 'white'):
            self.image = image.load(r"C:\Users\Colleen\Pictures\usedInSomething\whiteKnight.png").convert_alpha()
        else:
            print("Color issue with Knight")

    def __str__(self):
        return self.color + " knight"

    #returns (True, "") or (False, string error message)
    def validMove(self, board, currentCol, currentRow, destCol, destRow): #row is y value
        """Check whether the move is a legal L-shaped knight move.

        Returns a (bool, message) tuple; message explains a False result.
        Knights may leap over intervening pieces, so no blocking scan is needed.
        """
        #input is string values; the (X, Y) values below are numerical ints
        if board.isOccupied(destCol, destRow) and board.squareDic[destCol + destRow].piece.color == self.color:
            return False, "Can't capture your own piece"
        currentX = colNumber[currentCol]
        currentY = int(currentRow)
        destX = colNumber[destCol]
        destY = int(destRow)
        if (currentX == destX and currentY == destY): #doesn't actually move
            # BUGFIX: message previously read "That's not moving1".
            return False, "That's not moving!"
        elif destX > 7 or destX < 0 or destY > 7 or destY < 0: #goes off grid
            # BUGFIX: the last test used to re-check destX instead of destY.
            return False, "You cannot move your piece off the grid"
        else:
            absX = abs(destX-currentX)
            absY = abs(destY-currentY)
            # Legal knight moves are exactly (1,2) or (2,1) displacements.
            if (absY == 2 and absX == 1) or (absY == 1 and absX == 2):
                return True, ""
            else:
                return False, "Knights cannot move in this way"
class Bishop(Piece):
    def setPic(self):
        """Load this bishop's sprite image based on its color."""
        if (self.color == 'black'):
            # Raw strings keep the Windows path bytes identical while staying
            # valid under Python 3.
            self.image = image.load(r"C:\Users\Colleen\Pictures\usedInSomething\BlackBishop.png").convert_alpha()
            # convert_alpha preserves per pixel transparency
        elif (self.color == 'white'):
            self.image = image.load(r"C:\Users\Colleen\Pictures\usedInSomething\whiteBishop.png").convert_alpha()
        else:
            print("Color issue with Bishop")

    def __str__(self):
        return self.color + " bishop"

    #returns (True, "") or (False, string error message)
    def validMove(self, board, currentCol, currentRow, destCol, destRow): #row is y value
        """Check whether the move is a legal diagonal bishop move.

        Returns a (bool, message) tuple; message explains a False result.
        """
        #input is string values; the (X, Y) values below are numerical ints
        if board.isOccupied(destCol, destRow) and board.squareDic[destCol + destRow].piece.color == self.color:
            return False, "Can't capture your own piece"
        currentX = colNumber[currentCol]
        currentY = int(currentRow)
        destX = colNumber[destCol]
        destY = int(destRow)
        absX = abs(destX-currentX)
        absY = abs(destY-currentY)
        if (absX == 0 and absY == 0): #doesn't actually move
            return False, "That's not moving!"
        elif destX > 7 or destX < 0 or destY > 7 or destY < 0: #goes off grid
            # BUGFIX: the last test used to re-check destX instead of destY.
            return False, "You can't move your piece off the grid"
        elif absX != absY: #a bishop move must change rank and file equally
            return False, "Bishops cannot move in this way"
        elif absX == 1:
            return True, "" #no spaces in between
        else:
            # Walk the diagonal one square at a time and ensure every square
            # strictly between current and dest is empty.
            x = 1 if destX > currentX else -1 #column step direction
            y = 1 if destY > currentY else -1 #row step direction
            mag = 1 #magnitude along the diagonal
            while mag < absX: #absX == number of spaces in between + 1
                if board.isOccupied(colStr(currentX + mag*x), str(currentY + mag*y)):
                    return False, "Bishop cannot leap over other pieces."
                mag += 1
            return True, ""
class Queen(Piece):
    def setPic(self):
        """Load this queen's sprite image based on its color."""
        if (self.color == 'black'):
            # Raw strings keep the Windows path bytes identical while staying
            # valid under Python 3.
            self.image = image.load(r"C:\Users\Colleen\Pictures\usedInSomething\BlackQueen.png").convert_alpha()
            # convert_alpha preserves per pixel transparency
        elif (self.color == 'white'):
            self.image = image.load(r"C:\Users\Colleen\Pictures\usedInSomething\whiteQueen.png").convert_alpha()
        else:
            print("Color issue with Queen")

    def __str__(self):
        return self.color + " queen"

    #returns (True, "") or (False, string error message)
    def validMove(self, board, currentCol, currentRow, destCol, destRow): #row is y value
        """Check whether the move is a legal queen move (rook-style along a
        rank/file, or bishop-style along a diagonal).

        Returns a (bool, message) tuple; message explains a False result.
        """
        #input is string values; the (X, Y) values below are numerical ints
        if board.isOccupied(destCol, destRow) and board.squareDic[destCol + destRow].piece.color == self.color:
            return False, "Can't capture your own piece"
        currentX = colNumber[currentCol]
        currentY = int(currentRow)
        destX = colNumber[destCol]
        destY = int(destRow)
        absX = abs(destX-currentX)
        absY = abs(destY-currentY)
        if (absX == 0 and absY == 0): #doesn't move
            return False, "That's not moving!"
        elif destX > 7 or destX < 0 or destY > 7 or destY < 0: #goes off grid
            # BUGFIX: the last test used to re-check destX instead of destY.
            return False, "You cannot move your piece off the grid"
        elif absX == 0 or absY == 0:
            # Rook-style move along a file (absX == 0) or a rank (absY == 0).
            dist = max(absX, absY)
            if dist == 1:
                return True, "" #no spaces in between
            if absX == 0: #vertical: scan rows strictly in between
                step = 1 if destY > currentY else -1
                tempY = currentY + step
                while tempY != destY:
                    # BUGFIX: the downward branch used to pass the row as an
                    # int; isOccupied is called with string rows elsewhere.
                    if board.isOccupied(destCol, str(tempY)): # col, row
                        return False, "Queen cannot leap over other pieces."
                    tempY += step
                return True, ""
            else: #horizontal: scan columns strictly in between
                # BUGFIX: the horizontal distance used to be computed as
                # abs(currentY - destY) (always 0 here), so the blocking scan
                # never ran and queens could leap horizontally.
                left = min(currentX, destX)
                j = 1
                while j < dist:
                    if board.isOccupied(colStr(left + j), str(destY)):
                        return False, "Queen cannot leap over other pieces."
                    j += 1
                return True, ""
        elif absX == absY:
            # Bishop-style diagonal move.
            if absX == 1:
                return True, "" #no spaces in between
            x = 1 if destX > currentX else -1 #column step direction
            y = 1 if destY > currentY else -1 #row step direction
            mag = 1 #magnitude along the diagonal
            while mag < absX: #absX == number of spaces in between + 1
                if board.isOccupied(colStr(currentX + mag*x), str(currentY + mag*y)):
                    return False, "Queen cannot leap over other pieces."
                mag += 1
            return True, ""
        else:
            return False, "Queen cannot move in this way" #not a valid rook or bishop move
class King(Piece):
    def setPic(self):
        """Load this king's sprite image based on its color."""
        if (self.color == 'black'):
            # Raw strings keep the Windows path bytes identical while staying
            # valid under Python 3.
            self.image = image.load(r"C:\Users\Colleen\Pictures\usedInSomething\BlackKing.png").convert_alpha()
            # convert_alpha preserves per pixel transparency
        elif (self.color == 'white'):
            self.image = image.load(r"C:\Users\Colleen\Pictures\usedInSomething\whiteKing.png").convert_alpha()
        else:
            print("Color issue with King")

    def __str__(self):
        return self.color + " king"

    #returns (True, "") or (False, string error message)
    def validMove(self, board, currentCol, currentRow, destCol, destRow): #row is y value
        """Check whether the move is a legal one-square king move.

        Returns a (bool, message) tuple; message explains a False result.
        NOTE(review): castling is not handled here -- presumably elsewhere,
        since hasMoved is tracked for it; confirm against the game logic.
        """
        #input is string values; the (X, Y) values below are numerical ints
        currentX = colNumber[currentCol]
        currentY = int(currentRow)
        destX = colNumber[destCol]
        destY = int(destRow)
        if destX < 0 or destY < 0:
            return False, "Not on board"
        absX = abs(destX-currentX)
        absY = abs(destY-currentY)
        if (absX == 0 and absY == 0):
            return False, "That's not moving!"
        elif destX > 7 or destX < 0 or destY > 7 or destY < 0: #goes off grid
            # BUGFIX: the last test used to re-check destX instead of destY.
            return False, "You can't move your piece off the grid!"
        elif absX <= 1 and absY <= 1:
            # Destination must not hold one of the king's own pieces.
            if board.isOccupied(destCol, destRow) and board.squareDic[destCol + destRow].piece.color == self.color:
                return False, "Can't capture your own piece"
            return True, ""
        else:
            return False, "Kings cannot move in this way"
class Pawn(Piece):
    def __init__(self, color, board, col):
        """Place a pawn of the given color on its starting row of board.

        Black pawns start on row '6' and white pawns on row '1', so only the
        column letter needs to be supplied.
        """
        sprite.Sprite.__init__(self)
        self.color = color
        self.col = col
        self.setPic()
        if color == 'black':
            self.row = '6'
        elif color == 'white':
            self.row = '1'
        else:
            print("Pawn color error")
        self.onBoard = True
        # BUGFIX: Piece.__init__ sets hasMoved but this override did not,
        # leaving Pawn instances without the attribute until moveTo().
        self.hasMoved = False
        startSquare = board.squareDic[self.col+self.row]
        self.rect = startSquare.get_rect()
        startSquare.piece = self

    def setPic(self):
        """Load this pawn's sprite image based on its color."""
        if (self.color == 'black'):
            # Raw strings keep the Windows path bytes identical while staying
            # valid under Python 3.
            self.image = image.load(r"C:\Users\Colleen\Pictures\usedInSomething\BlackPawn.png").convert_alpha()
            # convert_alpha preserves per pixel transparency
        elif (self.color == 'white'):
            self.image = image.load(r"C:\Users\Colleen\Pictures\usedInSomething\whitePawn.png").convert_alpha()
        else:
            print("Color issue with Pawn")

    def __str__(self):
        return self.color + " pawn"

    #returns (True, "") or (False, string error message)
    def validMove(self, board, currentCol, currentRow, destCol, destRow): #row is y value
        """Check whether the move is a legal pawn move for this pawn's color.

        Handles the single advance, the two-square initial advance, and the
        diagonal capture.  En passant and promotion are not handled here.
        Returns a (bool, message) tuple; message explains a False result.
        """
        #input is string values; the (X, Y) values below are numerical ints
        if board.isOccupied(destCol, destRow) and board.squareDic[destCol + destRow].piece.color == self.color:
            return False, "Can't capture your own piece"
        currentX = colNumber[currentCol]
        currentY = int(currentRow)
        destX = colNumber[destCol]
        destY = int(destRow)
        absX = abs(destX - currentX)
        if (currentY == destY):
            if absX == 0: #doesn't actually move
                return False, "That's not moving!"
            else:
                return False, "Pawns can't move directly sideways!"
        elif destX > 7 or destX < 0 or destY > 7 or destY < 0: #goes off grid
            return False, "You can't move your piece off the grid!"
        elif self.color == 'white':
            #white pawns start on row '1' and advance toward higher rows
            if destY - currentY < 0:
                return False, "Pawns can't move backwards!"
            elif absX == 0: #straight ahead
                if board.isOccupied(destCol, destRow):
                    return False, "Pawn cannot capture directly in front of itself"
                if currentY == 1 and destY == 3: #initial two-square advance
                    # BUGFIX: pass the row as a string like every other
                    # isOccupied call (was the int 2).
                    if board.isOccupied(currentCol, '2'): #checks square in between
                        return False, "Pawn cannot jump over a piece"
                    return True, ""
                elif destY == currentY + 1:
                    return True, ""
                else:
                    return False, "Pawns can't move that far"
            elif absX == 1 and destY - currentY == 1: #diagonal step
                if board.isOccupied(destCol, destRow):
                    return True, "" #capturing
                else:
                    return False, "Pawn can only move that way when capturing"
            else:
                return False, "This piece can't move that many spaces now."
        elif self.color == 'black':
            #black pawns start on row '6' and advance toward lower rows
            if destY - currentY > 0:
                return False, "Pawns can't move backwards!"
            elif absX == 0: #straight ahead
                if board.isOccupied(destCol, destRow):
                    return False, "Pawn cannot capture directly in front of itself"
                if currentY == 6 and destY == 4: #initial two-square advance
                    # BUGFIX: row passed as a string (was the int 5).
                    if board.isOccupied(currentCol, '5'): #checks square in between
                        return False, "Pawn cannot jump over a piece"
                    return True, ""
                elif destY == currentY - 1:
                    return True, ""
                else:
                    return False, "Pawns can't move that far"
            elif absX == 1 and destY - currentY == -1: #diagonal step
                if board.isOccupied(destCol, destRow):
                    return (True, "") #capturing
                else:
                    return False, "Pawn can only move that way when capturing"
            else:
                return False, "This piece can't move that many spaces now."
        else:
            return False, "Color Error"
| 44.112469
| 115
| 0.533533
| 2,073
| 18,042
| 4.616015
| 0.10275
| 0.052879
| 0.035531
| 0.035113
| 0.84293
| 0.833734
| 0.803428
| 0.797785
| 0.765597
| 0.744487
| 0
| 0.009959
| 0.371079
| 18,042
| 408
| 116
| 44.220588
| 0.833348
| 0.121162
| 0
| 0.711048
| 0
| 0
| 0.149816
| 0.042443
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.002833
| null | null | 0.01983
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c6235607674458d5fd995385f40c5c0e5591fb78
| 13,173
|
py
|
Python
|
src/test/parser/pattern/test_nodes.py
|
ajit2688/program-y-chatbot
|
f0a7eb33be2ec8de630644a6393296ddd2576eee
|
[
"MIT"
] | null | null | null |
src/test/parser/pattern/test_nodes.py
|
ajit2688/program-y-chatbot
|
f0a7eb33be2ec8de630644a6393296ddd2576eee
|
[
"MIT"
] | null | null | null |
src/test/parser/pattern/test_nodes.py
|
ajit2688/program-y-chatbot
|
f0a7eb33be2ec8de630644a6393296ddd2576eee
|
[
"MIT"
] | null | null | null |
from programy.dialog import Sentence
from programy.parser.pattern.nodes import *
from programy.parser.template.nodes import TemplateNode
from test.parser.pattern.base import PatternTestBaseClass
class PatternNodeTests(PatternTestBaseClass):
    """Tests for the plain PatternNode base graph node."""

    def test_init(self):
        # A bare node has none of the special roles and no children.
        node = PatternNode()
        self.assertIsNotNone(node)
        self.assertFalse(node.is_root())
        self.assertFalse(node.is_priority())
        self.assertFalse(node.is_wildcard())
        self.assertFalse(node.is_zero_or_more())
        self.assertFalse(node.is_one_or_more())
        self.assertFalse(node.has_children())
        self.assertIsNotNone(node.children)
        self.assertFalse(node.equivalent(PatternNode()))
        self.assertEqual(node.to_string(), "NODE [P(0)^(0)#(0)C(0)_(0)*(0)To(0)Th(0)Te(0)]")
        # Adding a child bumps the C(...) count in to_string().
        node.add_child(PatternNode())
        self.assertEqual(len(node.children), 1)
        self.assertEqual(node.to_string(), "NODE [P(0)^(0)#(0)C(1)_(0)*(0)To(0)Th(0)Te(0)]")

    def test_add_child(self):
        # Adding an equivalent child a second time returns the node already
        # present rather than inserting a duplicate.
        node = PatternNode()
        priority_word1 = PatternPriorityWordNode("pword")
        priority_word2 = PatternPriorityWordNode("pword")
        node.add_child(priority_word1)
        new_node = node.add_child(priority_word2)
        self.assertEqual(new_node, priority_word1)
        arrow_node1 = PatternZeroOrMoreWildCardNode("^")
        arrow_node2 = PatternZeroOrMoreWildCardNode("^")
        node.add_child(arrow_node1)
        new_node = node.add_child(arrow_node2)
        self.assertEqual(new_node, arrow_node1)
class PatternRootNodeTests(PatternTestBaseClass):
    """Tests for PatternRootNode, the single root of a pattern graph."""

    def test_init(self):
        node = PatternRootNode()
        self.assertIsNotNone(node)
        self.assertTrue(node.is_root())
        self.assertFalse(node.is_priority())
        self.assertFalse(node.is_wildcard())
        self.assertFalse(node.is_zero_or_more())
        self.assertFalse(node.is_one_or_more())
        self.assertIsNotNone(node.children)
        self.assertFalse(node.has_children())
        self.assertTrue(node.equivalent(PatternRootNode()))
        self.assertEqual(node.to_string(), "ROOT [P(0)^(0)#(0)C(0)_(0)*(0)To(0)Th(0)Te(0)]")
        node.add_child(PatternNode())
        self.assertEqual(len(node.children), 1)
        self.assertEqual(node.to_string(), "ROOT [P(0)^(0)#(0)C(1)_(0)*(0)To(0)Th(0)Te(0)]")

    def test_multiple_roots(self):
        # A root may not be added below another root.
        node1 = PatternRootNode()
        node2 = PatternRootNode()
        with self.assertRaises(ParserException) as raised:
            node1.can_add(node2)
        self.assertTrue(str(raised.exception).startswith("Cannot add root node to existing root node"))

    def test_root_added_to_child(self):
        # A root may not be added below an ordinary word node either.
        node1 = PatternWordNode("test")
        node2 = PatternRootNode()
        with self.assertRaises(ParserException) as raised:
            node1.can_add(node2)
        self.assertTrue(str(raised.exception).startswith("Cannot add root node to child node"))
class PatternTopicNodeTests(PatternTestBaseClass):
    """Tests for PatternTopicNode, the TOPIC marker node."""

    def test_init(self):
        node = PatternTopicNode()
        self.assertIsNotNone(node)
        self.assertFalse(node.is_root())
        self.assertFalse(node.is_priority())
        self.assertFalse(node.is_wildcard())
        self.assertFalse(node.is_zero_or_more())
        self.assertFalse(node.is_one_or_more())
        self.assertIsNotNone(node.children)
        self.assertFalse(node.has_children())
        self.assertTrue(node.equivalent(PatternTopicNode()))
        self.assertEqual(node.to_string(), "TOPIC [P(0)^(0)#(0)C(0)_(0)*(0)To(0)Th(0)Te(0)]")

    def test_topic_to_root(self):
        # A topic node may not be attached directly to the root.
        node1 = PatternRootNode()
        node2 = PatternTopicNode()
        with self.assertRaises(ParserException) as raised:
            node1.can_add(node2)
        self.assertEqual(str(raised.exception), "Cannot add topic node to root node")

    def test_multiple_topics(self):
        # Topic nodes may not be nested.
        node1 = PatternTopicNode()
        node2 = PatternTopicNode()
        with self.assertRaises(ParserException) as raised:
            node1.can_add(node2)
        self.assertEqual(str(raised.exception), "Cannot add topic node to topic node")
class PatternThatNodeTests(PatternTestBaseClass):
    """Tests for PatternThatNode, the THAT marker node."""

    def test_init(self):
        node = PatternThatNode()
        self.assertIsNotNone(node)
        self.assertFalse(node.is_root())
        self.assertFalse(node.is_priority())
        self.assertFalse(node.is_wildcard())
        self.assertFalse(node.is_zero_or_more())
        self.assertFalse(node.is_one_or_more())
        self.assertIsNotNone(node.children)
        self.assertFalse(node.has_children())
        self.assertTrue(node.equivalent(PatternThatNode()))
        self.assertEqual(node.to_string(), "THAT [P(0)^(0)#(0)C(0)_(0)*(0)To(0)Th(0)Te(0)]")

    def test_that_to_root(self):
        # A that node may not be attached directly to the root.
        node1 = PatternRootNode()
        node2 = PatternThatNode()
        with self.assertRaises(ParserException) as raised:
            node1.can_add(node2)
        self.assertEqual(str(raised.exception), "Cannot add that node to root node")

    def test_multiple_thats(self):
        # That nodes may not be nested.
        node1 = PatternThatNode()
        node2 = PatternThatNode()
        with self.assertRaises(ParserException) as raised:
            node1.can_add(node2)
        self.assertEqual(str(raised.exception), "Cannot add that node to that node")
class PatternTemplateNodeTests(PatternTestBaseClass):
    """Tests for PatternTemplateNode, which wraps a TemplateNode leaf."""

    def test_init(self):
        node = PatternTemplateNode(TemplateNode())
        self.assertIsNotNone(node)
        self.assertFalse(node.is_root())
        self.assertFalse(node.is_priority())
        self.assertFalse(node.is_wildcard())
        self.assertFalse(node.is_zero_or_more())
        self.assertFalse(node.is_one_or_more())
        self.assertIsNotNone(node.children)
        self.assertFalse(node.has_children())
        self.assertTrue(node.equivalent(PatternTemplateNode(TemplateNode())))
        # Note the Te(1) count and the trailing space in the expected string.
        self.assertEqual(node.to_string(), "PTEMPLATE [P(0)^(0)#(0)C(0)_(0)*(0)To(0)Th(0)Te(1)] ")

    def test_template_to_root(self):
        # A template node may not be attached directly to the root.
        node1 = PatternRootNode()
        node2 = PatternTemplateNode(TemplateNode())
        with self.assertRaises(ParserException) as raised:
            node1.can_add(node2)
        self.assertEqual(str(raised.exception), "Cannot add template node to root node")

    def test_multiple_templates(self):
        # Template nodes may not be nested.
        node1 = PatternTemplateNode(TemplateNode())
        node2 = PatternTemplateNode(TemplateNode())
        with self.assertRaises(ParserException) as raised:
            node1.can_add(node2)
        self.assertEqual(str(raised.exception), "Cannot add template node to template node")
class PatternWordNodeTests(PatternTestBaseClass):
    """Tests for PatternWordNode, which matches one literal word."""

    def test_init(self):
        node = PatternWordNode("test1")
        self.assertFalse(node.is_root())
        self.assertFalse(node.is_priority())
        self.assertFalse(node.is_wildcard())
        self.assertFalse(node.is_zero_or_more())
        self.assertFalse(node.is_one_or_more())
        self.assertIsNotNone(node.children)
        self.assertFalse(node.has_children())
        self.assertTrue(node.equivalent(PatternWordNode("test1")))
        self.assertFalse(node.is_root())
        self.assertEqual(node.to_string(), "WORD [P(0)^(0)#(0)C(0)_(0)*(0)To(0)Th(0)Te(0)] word=[test1]")
        # matches() accepts a plain string...
        self.assertTrue(node.matches(self.bot, self.clientid, "test1"))
        node.add_child(PatternWordNode("test2"))
        self.assertEqual(len(node.children), 1)
        self.assertEqual(node.to_string(), "WORD [P(0)^(0)#(0)C(1)_(0)*(0)To(0)Th(0)Te(0)] word=[test1]")
        # ...and also a Sentence object.
        self.assertIsNotNone(node.matches(self.bot, self.clientid, Sentence("test1")))
class PatternPriorityWordNodeTests(PatternTestBaseClass):
    """Tests for PatternPriorityWordNode ($word priority matching)."""

    def test_init(self):
        node = PatternPriorityWordNode("test1")
        self.assertIsNotNone(node)
        self.assertFalse(node.is_root())
        # Unlike a plain word node, a priority word reports is_priority().
        self.assertTrue(node.is_priority())
        self.assertFalse(node.is_wildcard())
        self.assertFalse(node.is_zero_or_more())
        self.assertFalse(node.is_one_or_more())
        self.assertIsNotNone(node.children)
        self.assertFalse(node.has_children())
        self.assertTrue(node.equivalent(PatternPriorityWordNode("test1")))
        self.assertFalse(node.is_root())
        self.assertEqual(node.to_string(), "PWORD [P(0)^(0)#(0)C(0)_(0)*(0)To(0)Th(0)Te(0)] word=[test1]")
        self.assertTrue(node.matches(self.bot, self.clientid, "test1"))
        node.add_child(PatternWordNode("test2"))
        self.assertEqual(len(node.children), 1)
        self.assertEqual(node.to_string(), "PWORD [P(0)^(0)#(0)C(1)_(0)*(0)To(0)Th(0)Te(0)] word=[test1]")
        self.assertTrue(node.matches(self.bot, self.clientid, "test1"))
class PatternSetNodeTests(PatternTestBaseClass):
    """Tests for PatternSetNode, which matches words from a named set."""

    def test_init(self):
        node = PatternSetNode("test1")
        self.assertIsNotNone(node)
        self.assertFalse(node.is_root())
        self.assertFalse(node.is_priority())
        self.assertFalse(node.is_wildcard())
        self.assertFalse(node.is_zero_or_more())
        self.assertFalse(node.is_one_or_more())
        self.assertIsNotNone(node.children)
        self.assertFalse(node.has_children())
        self.assertTrue(node.equivalent(PatternSetNode("test1")))
        self.assertFalse(node.is_root())
        # The set name is reported uppercased in to_string().
        self.assertEqual(node.to_string(), "SET [P(0)^(0)#(0)C(0)_(0)*(0)To(0)Th(0)Te(0)] name=[TEST1]")
        # Seed the brain's set collection directly, keyed by uppercase name.
        self.bot.brain.sets._sets["TEST1"] = ["val1", "val2", "val3"]
        self.assertTrue(node.matches(self.bot, self.clientid, "val1"))
        self.assertTrue(node.matches(self.bot, self.clientid, "val2"))
        self.assertFalse(node.matches(self.bot, self.clientid, "val4"))
class PatternBotNodeTests(PatternTestBaseClass):
    """Tests for PatternBotNode, which matches a bot property's value."""

    def test_init(self):
        node = PatternBotNode("test1")
        self.assertIsNotNone(node)
        self.assertFalse(node.is_root())
        self.assertFalse(node.is_priority())
        self.assertFalse(node.is_wildcard())
        self.assertFalse(node.is_zero_or_more())
        self.assertFalse(node.is_one_or_more())
        self.assertIsNotNone(node.children)
        self.assertFalse(node.has_children())
        self.assertTrue(node.equivalent(PatternBotNode("test1")))
        self.assertFalse(node.is_root())
        self.assertEqual(node.to_string(), "BOT [P(0)^(0)#(0)C(0)_(0)*(0)To(0)Th(0)Te(0)] property=[test1]")
        # Seed the brain's property collection directly; the node matches the
        # property's current value, not its name.
        self.bot.brain.properties._properties["test1"] = "val1"
        self.assertTrue(node.matches(self.bot, self.clientid, "val1"))
        self.assertFalse(node.matches(self.bot, self.clientid, "val4"))
class PatternZeroOrMoreWildCardNodeTests(PatternTestBaseClass):
    """Tests for the zero-or-more wildcard node ('#' and '^')."""

    def test_invalid_wildcard(self):
        # Only '#' and '^' are accepted; anything else raises at construction.
        with self.assertRaises(ParserException) as raised:
            node = PatternZeroOrMoreWildCardNode("X")
            self.assertIsNone(node)

    def test_init(self):
        node = PatternZeroOrMoreWildCardNode("#")
        self.assertFalse(node.is_root())
        self.assertFalse(node.is_priority())
        self.assertTrue(node.is_wildcard())
        self.assertTrue(node.is_zero_or_more())
        self.assertFalse(node.is_one_or_more())
        self.assertIsNotNone(node.children)
        self.assertFalse(node.has_children())
        self.assertEqual(node.wildcard, "#")
        self.assertTrue(node.equivalent(PatternZeroOrMoreWildCardNode("#")))
        self.assertFalse(node.is_root())
        self.assertEqual(node.to_string(), "ZEROORMORE [P(0)^(0)#(0)C(0)_(0)*(0)To(0)Th(0)Te(0)] wildcard=[#]")
        # The '^' form behaves identically apart from the stored wildcard.
        node = PatternZeroOrMoreWildCardNode("^")
        self.assertIsNotNone(node)
        self.assertEqual(node.wildcard, "^")
        self.assertTrue(node.equivalent(PatternZeroOrMoreWildCardNode("^")))
        self.assertFalse(node.is_root())
        self.assertEqual(node.to_string(), "ZEROORMORE [P(0)^(0)#(0)C(0)_(0)*(0)To(0)Th(0)Te(0)] wildcard=[^]")
class PatternOneOrMoreWildCardNodeTests(PatternTestBaseClass):
    """Tests for the one-or-more wildcard node ('*' and '_')."""

    def test_invalid_wildcard(self):
        # Only '*' and '_' are accepted; anything else raises at construction.
        with self.assertRaises(ParserException) as raised:
            node = PatternOneOrMoreWildCardNode("X")
            self.assertIsNone(node)

    def test_init(self):
        node = PatternOneOrMoreWildCardNode("*")
        self.assertIsNotNone(node)
        self.assertFalse(node.is_root())
        self.assertFalse(node.is_priority())
        self.assertTrue(node.is_wildcard())
        self.assertFalse(node.is_zero_or_more())
        self.assertTrue(node.is_one_or_more())
        self.assertIsNotNone(node.children)
        self.assertFalse(node.has_children())
        self.assertEqual(node.wildcard, "*")
        self.assertTrue(node.equivalent(PatternOneOrMoreWildCardNode("*")))
        self.assertFalse(node.is_root())
        self.assertEqual(node.to_string(), "ONEORMORE [P(0)^(0)#(0)C(0)_(0)*(0)To(0)Th(0)Te(0)] wildcard=[*]")
        # The '_' form behaves identically apart from the stored wildcard.
        node = PatternOneOrMoreWildCardNode("_")
        self.assertIsNotNone(node)
        self.assertEqual(node.wildcard, "_")
        self.assertTrue(node.equivalent(PatternOneOrMoreWildCardNode("_")))
        self.assertFalse(node.is_root())
        self.assertEqual(node.to_string(), "ONEORMORE [P(0)^(0)#(0)C(0)_(0)*(0)To(0)Th(0)Te(0)] wildcard=[_]")
| 37.529915
| 111
| 0.665528
| 1,557
| 13,173
| 5.485549
| 0.073218
| 0.124693
| 0.157944
| 0.140148
| 0.805643
| 0.791711
| 0.726964
| 0.709636
| 0.705538
| 0.69102
| 0
| 0.021612
| 0.192135
| 13,173
| 350
| 112
| 37.637143
| 0.780962
| 0
| 0
| 0.601563
| 0
| 0.066406
| 0.104768
| 0.052915
| 0
| 0
| 0
| 0
| 0.644531
| 1
| 0.085938
| false
| 0
| 0.015625
| 0
| 0.144531
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c623a7268204473e3f41ed709c00838a54eceace
| 5,079
|
py
|
Python
|
model/main/traditional_rnn.py
|
chasebk/flnn_code
|
a561d4c697d1aa545a677f9e7d126ace7bb40068
|
[
"Apache-2.0"
] | 36
|
2019-07-28T02:26:28.000Z
|
2022-03-29T03:00:56.000Z
|
model/main/traditional_rnn.py
|
chasebk/flnn_code
|
a561d4c697d1aa545a677f9e7d126ace7bb40068
|
[
"Apache-2.0"
] | 1
|
2021-09-14T13:21:54.000Z
|
2021-09-14T13:21:54.000Z
|
model/main/traditional_rnn.py
|
chasebk/flnn_code
|
a561d4c697d1aa545a677f9e7d126ace7bb40068
|
[
"Apache-2.0"
] | 16
|
2020-02-28T06:55:42.000Z
|
2022-03-31T01:58:51.000Z
|
from keras import backend
from keras.models import Sequential
from keras.layers import Dense, LSTM, Dropout
from model.root.traditional.root_rnn import RootRnn
class Rnn1HL(RootRnn):
    """
    Recurrent Neural Network (1 Hidden Layer)
    """
    def __init__(self, root_base_paras=None, root_rnn_paras=None):
        # Common data/hyper-parameter setup lives in the RootRnn base class.
        RootRnn.__init__(self, root_base_paras, root_rnn_paras)
        # Encode the sliding-window size and the network hyper-parameters in
        # the results filename so each experiment's output is distinguishable.
        self.filename = "RNN-1HL-sliding_{}-net_para_{}".format(root_base_paras["sliding"], [self.hidden_sizes, self.epoch,
                        self.batch_size, self.learning_rate, self.activations, self.optimizer, self.loss, self.dropouts])
    def _training__(self):
        # The RNN architecture: one LSTM hidden layer with dropout, then a
        # single-unit dense output.
        self.model = Sequential()
        self.model.add(LSTM(units=self.hidden_sizes[0], activation=self.activations[0], input_shape=(self.X_train.shape[1], 1)))
        self.model.add(Dropout(self.dropouts[0]))
        self.model.add(Dense(units=1, activation=self.activations[1]))
        self.model.compile(loss=self.loss, optimizer=self.optimizer)
        # Cap TensorFlow at 2 intra-op / 2 inter-op threads before training.
        backend.set_session(backend.tf.Session(config=backend.tf.ConfigProto(intra_op_parallelism_threads=2, inter_op_parallelism_threads=2)))
        ml = self.model.fit(self.X_train, self.y_train, epochs=self.epoch, batch_size=self.batch_size, verbose=self.print_train)
        # Keep the per-epoch training loss curve for later reporting.
        self.loss_train = ml.history["loss"]
class Rnn2HL(RootRnn):
    """
    Recurrent Neural Network (2 Hidden Layer)
    """
    def __init__(self, root_base_paras=None, root_rnn_paras=None):
        # Common data/hyper-parameter setup lives in the RootRnn base class.
        RootRnn.__init__(self, root_base_paras, root_rnn_paras)
        # NOTE(review): unlike Rnn1HL, self.dropouts is not included in the
        # filename parameter list -- confirm whether that is intentional.
        self.filename = "RNN-2HL-sliding_{}-net_para_{}".format(root_base_paras["sliding"], [self.hidden_sizes,
                        self.epoch, self.batch_size, self.learning_rate, self.activations, self.optimizer, self.loss])
    def _training__(self):
        # The RNN architecture: two stacked LSTM layers (the first returns the
        # full sequence to feed the second), each followed by dropout, then a
        # single-unit dense output.
        self.model = Sequential()
        self.model.add(LSTM(units=self.hidden_sizes[0], return_sequences=True, input_shape=(self.X_train.shape[1], 1), activation=self.activations[0]))
        self.model.add(Dropout(self.dropouts[0]))
        self.model.add(LSTM(units=self.hidden_sizes[1], activation=self.activations[1]))
        self.model.add(Dropout(self.dropouts[1]))
        self.model.add(Dense(units=1, activation=self.activations[2]))
        self.model.compile(loss=self.loss, optimizer=self.optimizer)
        # Cap TensorFlow at 2 intra-op / 2 inter-op threads before training.
        backend.set_session(backend.tf.Session(config=backend.tf.ConfigProto(intra_op_parallelism_threads=2, inter_op_parallelism_threads=2)))
        ml = self.model.fit(self.X_train, self.y_train, epochs=self.epoch, batch_size=self.batch_size, verbose=self.print_train)
        # Keep the per-epoch training loss curve for later reporting.
        self.loss_train = ml.history["loss"]
class Lstm1HL(RootRnn):
    """
    Long-short Term Memory Neural Network (1 Hidden Layer)
    """
    def __init__(self, root_base_paras=None, root_rnn_paras=None):
        # Common data/hyper-parameter setup lives in the RootRnn base class.
        RootRnn.__init__(self, root_base_paras, root_rnn_paras)
        # Results filename encodes the sliding window and hyper-parameters.
        self.filename = "LSTM-1HL-sliding_{}-net_para_{}".format(root_base_paras["sliding"], [self.hidden_sizes,
                        self.epoch, self.batch_size, self.learning_rate, self.activations, self.optimizer, self.loss])
    def _training__(self):
        # The LSTM architecture: one LSTM layer (variable-length input, one
        # feature) followed by a single-unit dense output; no dropout here.
        self.model = Sequential()
        self.model.add(LSTM(units=self.hidden_sizes[0], input_shape=(None, 1), activation=self.activations[0]))
        self.model.add(Dense(units=1, activation=self.activations[1]))
        self.model.compile(loss=self.loss, optimizer=self.optimizer)
        # Cap TensorFlow at 2 intra-op / 2 inter-op threads before training.
        backend.set_session(backend.tf.Session(config=backend.tf.ConfigProto(intra_op_parallelism_threads=2, inter_op_parallelism_threads=2)))
        ml = self.model.fit(self.X_train, self.y_train, epochs=self.epoch, batch_size=self.batch_size, verbose=self.print_train)
        # Keep the per-epoch training loss curve for later reporting.
        self.loss_train = ml.history["loss"]
class Lstm2HL(RootRnn):
    """
    Long-short Term Memory Neural Network (2 Hidden Layer)
    """
    def __init__(self, root_base_paras=None, root_rnn_paras=None):
        # Common data/hyper-parameter setup lives in the RootRnn base class.
        RootRnn.__init__(self, root_base_paras, root_rnn_paras)
        # Results filename encodes the sliding window and hyper-parameters.
        self.filename = "LSTM-2HL-sliding_{}-net_para_{}".format(root_base_paras["sliding"], [self.hidden_sizes,
                        self.epoch, self.batch_size, self.learning_rate, self.activations, self.optimizer, self.loss])
    def _training__(self):
        # The LSTM architecture: two stacked LSTM layers (the first returns
        # the full sequence to feed the second), then a single-unit dense
        # output; no dropout layers in this variant.
        self.model = Sequential()
        self.model.add(LSTM(units=self.hidden_sizes[0], return_sequences=True, input_shape=(None, 1), activation=self.activations[0]))
        self.model.add(LSTM(units=self.hidden_sizes[1], activation=self.activations[1]))
        self.model.add(Dense(units=1, activation=self.activations[2]))
        self.model.compile(loss=self.loss, optimizer=self.optimizer)
        # Cap TensorFlow at 2 intra-op / 2 inter-op threads before training.
        backend.set_session(backend.tf.Session(config=backend.tf.ConfigProto(intra_op_parallelism_threads=2, inter_op_parallelism_threads=2)))
        ml = self.model.fit(self.X_train, self.y_train, epochs=self.epoch, batch_size=self.batch_size, verbose=self.print_train)
        # Keep the per-epoch training loss curve for later reporting.
        self.loss_train = ml.history["loss"]
| 56.433333
| 151
| 0.707816
| 697
| 5,079
| 4.906743
| 0.123386
| 0.065789
| 0.045614
| 0.068421
| 0.931287
| 0.931287
| 0.931287
| 0.910526
| 0.890058
| 0.890058
| 0
| 0.011516
| 0.162237
| 5,079
| 89
| 152
| 57.067416
| 0.792244
| 0.055917
| 0
| 0.704918
| 0
| 0
| 0.035177
| 0.025853
| 0
| 0
| 0
| 0
| 0
| 1
| 0.131148
| false
| 0
| 0.065574
| 0
| 0.262295
| 0.065574
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d6b1df426e88dd3ea698a9496e98c1ae3873498f
| 123
|
py
|
Python
|
python/testData/typesFromAttributes/resultsOrdering/module.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/typesFromAttributes/resultsOrdering/module.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/typesFromAttributes/resultsOrdering/module.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
class OtherClassA(object):
    """Fixture class A exposing a no-op sort() method."""

    def sort(self):
        """Do nothing and return None, like the original no-op body."""
        return None
class OtherClassB(object):
    """Fixture class B exposing a no-op sort() method."""

    def sort(self):
        """Do nothing and return None, like the original no-op body."""
        return None
| 15.375
| 26
| 0.601626
| 14
| 123
| 5.285714
| 0.571429
| 0.243243
| 0.351351
| 0.459459
| 0.567568
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.300813
| 123
| 8
| 27
| 15.375
| 0.860465
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.333333
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 8
|
d6d47184d73867e409b4d3918be53824517c943d
| 27
|
py
|
Python
|
source/const.py
|
xoar/zuma-fpga
|
76552d44dd97cfa213e642bea81142a9bdeaef20
|
[
"BSD-2-Clause"
] | 20
|
2015-07-04T23:31:25.000Z
|
2022-01-13T06:36:56.000Z
|
source/const.py
|
xoar/zuma-fpga
|
76552d44dd97cfa213e642bea81142a9bdeaef20
|
[
"BSD-2-Clause"
] | 2
|
2016-10-04T19:15:25.000Z
|
2018-09-24T13:45:58.000Z
|
source/const.py
|
adbrant/zuma-fpga
|
7205895cf875a242b0d04dea763bca3d800c8844
|
[
"BSD-2-Clause"
] | 9
|
2015-06-16T19:32:07.000Z
|
2020-05-03T20:24:22.000Z
|
# Cardinal direction indices, in the order north, south, east, west.
N, S, E, W = 0, 1, 2, 3
| 6.75
| 7
| 0.296296
| 8
| 27
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 0.555556
| 27
| 4
| 8
| 6.75
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d6e6cc5b8561460c84e16cbfe8e62a94e209760f
| 403
|
py
|
Python
|
Symbol Patterns/symbolpattern114.py
|
Daksh777/Python-PatternHouse
|
ab801631c2e1f5ed3cc12a26c959d41a5e51273d
|
[
"MIT"
] | 8
|
2021-03-20T11:26:35.000Z
|
2022-01-05T02:39:15.000Z
|
Symbol Patterns/symbolpattern114.py
|
Daksh777/Python-PatternHouse
|
ab801631c2e1f5ed3cc12a26c959d41a5e51273d
|
[
"MIT"
] | 851
|
2021-04-02T09:08:15.000Z
|
2022-01-12T11:26:57.000Z
|
Symbol Patterns/symbolpattern114.py
|
Daksh777/Python-PatternHouse
|
ab801631c2e1f5ed3cc12a26c959d41a5e51273d
|
[
"MIT"
] | 15
|
2021-04-13T06:10:17.000Z
|
2022-01-08T05:07:21.000Z
|
# Prints a star pattern: a right-aligned triangle stacked on a pair of
# mirrored triangles separated by a widening gap.
n = 5
# Top half: row r gets (2n - r) leading spaces followed by r+1 stars.
for row in range(n):
    print(' ' * (2 * n - row) + '* ' * (row + 1))
# Bottom half: leading spaces, stars, a gap of double spaces, mirrored stars.
for row in range(n):
    leading = ' ' * (n - row)
    stars = '* ' * (row + 1)
    gap = '  ' * (n - 1 - row)
    print(leading + stars + gap + stars)
| 22.388889
| 30
| 0.411911
| 66
| 403
| 2.515152
| 0.212121
| 0.337349
| 0.210843
| 0.301205
| 0.843373
| 0.825301
| 0.825301
| 0.662651
| 0.662651
| 0.385542
| 0
| 0.031008
| 0.359801
| 403
| 17
| 31
| 23.705882
| 0.612403
| 0
| 0
| 0.705882
| 0
| 0
| 0.024814
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.470588
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
d6f12088cfe81912e6075142349965752bc15f0f
| 10,651
|
py
|
Python
|
misc/tables6.py
|
alreich/abstract_algebra
|
9aca57cbc002677aeb117f542a961b7cbdfd4c29
|
[
"MIT"
] | 1
|
2021-12-04T11:23:21.000Z
|
2021-12-04T11:23:21.000Z
|
misc/tables6.py
|
alreich/abstract_algebra
|
9aca57cbc002677aeb117f542a961b7cbdfd4c29
|
[
"MIT"
] | null | null | null |
misc/tables6.py
|
alreich/abstract_algebra
|
9aca57cbc002677aeb117f542a961b7cbdfd4c29
|
[
"MIT"
] | null | null | null |
# 80 candidate 6x6 operation ("Cayley") tables over the elements 0..5,
# used as a fixture by the abstract-algebra code that consumes this module.
# Each table has six rows; row r, column c holds the table value for (r, c).
# The first row of every table is [0, 1, 2, 3, 4, 5], i.e. 0 acts as a left
# identity; the remaining five rows are tuples.
# NOTE(review): the mixed list/tuple row types look accidental but are kept
# as-is — confirm downstream code does not depend on the distinction.
tables6 = \
    [([0, 1, 2, 3, 4, 5],
      (1, 0, 3, 2, 5, 4),
      (2, 3, 4, 5, 0, 1),
      (3, 2, 5, 4, 1, 0),
      (4, 5, 0, 1, 2, 3),
      (5, 4, 1, 0, 3, 2)),
     ([0, 1, 2, 3, 4, 5],
      (1, 0, 3, 2, 5, 4),
      (2, 3, 4, 5, 1, 0),
      (3, 2, 5, 4, 0, 1),
      (4, 5, 1, 0, 3, 2),
      (5, 4, 0, 1, 2, 3)),
     ([0, 1, 2, 3, 4, 5],
      (1, 0, 3, 2, 5, 4),
      (2, 3, 5, 4, 0, 1),
      (3, 2, 4, 5, 1, 0),
      (4, 5, 0, 1, 3, 2),
      (5, 4, 1, 0, 2, 3)),
     ([0, 1, 2, 3, 4, 5],
      (1, 0, 3, 2, 5, 4),
      (2, 3, 5, 4, 1, 0),
      (3, 2, 4, 5, 0, 1),
      (4, 5, 1, 0, 2, 3),
      (5, 4, 0, 1, 3, 2)),
     ([0, 1, 2, 3, 4, 5],
      (1, 0, 3, 2, 5, 4),
      (2, 4, 0, 5, 1, 3),
      (3, 5, 1, 4, 0, 2),
      (4, 2, 5, 0, 3, 1),
      (5, 3, 4, 1, 2, 0)),
     ([0, 1, 2, 3, 4, 5],
      (1, 0, 3, 2, 5, 4),
      (2, 4, 5, 1, 3, 0),
      (3, 5, 4, 0, 2, 1),
      (4, 2, 1, 5, 0, 3),
      (5, 3, 0, 4, 1, 2)),
     ([0, 1, 2, 3, 4, 5],
      (1, 0, 3, 2, 5, 4),
      (2, 5, 0, 4, 3, 1),
      (3, 4, 1, 5, 2, 0),
      (4, 3, 5, 1, 0, 2),
      (5, 2, 4, 0, 1, 3)),
     ([0, 1, 2, 3, 4, 5],
      (1, 0, 3, 2, 5, 4),
      (2, 5, 4, 1, 0, 3),
      (3, 4, 5, 0, 1, 2),
      (4, 3, 0, 5, 2, 1),
      (5, 2, 1, 4, 3, 0)),
     ([0, 1, 2, 3, 4, 5],
      (1, 0, 4, 5, 2, 3),
      (2, 3, 0, 1, 5, 4),
      (3, 2, 5, 4, 0, 1),
      (4, 5, 1, 0, 3, 2),
      (5, 4, 3, 2, 1, 0)),
     ([0, 1, 2, 3, 4, 5],
      (1, 0, 4, 5, 2, 3),
      (2, 3, 5, 4, 1, 0),
      (3, 2, 1, 0, 5, 4),
      (4, 5, 3, 2, 0, 1),
      (5, 4, 0, 1, 3, 2)),
     ([0, 1, 2, 3, 4, 5],
      (1, 0, 4, 5, 2, 3),
      (2, 4, 3, 0, 5, 1),
      (3, 5, 0, 2, 1, 4),
      (4, 2, 5, 1, 3, 0),
      (5, 3, 1, 4, 0, 2)),
     ([0, 1, 2, 3, 4, 5],
      (1, 0, 4, 5, 2, 3),
      (2, 4, 3, 1, 5, 0),
      (3, 5, 1, 4, 0, 2),
      (4, 2, 5, 0, 3, 1),
      (5, 3, 0, 2, 1, 4)),
     ([0, 1, 2, 3, 4, 5],
      (1, 0, 4, 5, 2, 3),
      (2, 4, 5, 0, 3, 1),
      (3, 5, 0, 4, 1, 2),
      (4, 2, 3, 1, 5, 0),
      (5, 3, 1, 2, 0, 4)),
     ([0, 1, 2, 3, 4, 5],
      (1, 0, 4, 5, 2, 3),
      (2, 4, 5, 1, 3, 0),
      (3, 5, 1, 2, 0, 4),
      (4, 2, 3, 0, 5, 1),
      (5, 3, 0, 4, 1, 2)),
     ([0, 1, 2, 3, 4, 5],
      (1, 0, 4, 5, 2, 3),
      (2, 5, 0, 4, 3, 1),
      (3, 4, 5, 0, 1, 2),
      (4, 3, 1, 2, 5, 0),
      (5, 2, 3, 1, 0, 4)),
     ([0, 1, 2, 3, 4, 5],
      (1, 0, 4, 5, 2, 3),
      (2, 5, 3, 0, 1, 4),
      (3, 4, 0, 2, 5, 1),
      (4, 3, 5, 1, 0, 2),
      (5, 2, 1, 4, 3, 0)),
     ([0, 1, 2, 3, 4, 5],
      (1, 0, 5, 4, 3, 2),
      (2, 3, 0, 1, 5, 4),
      (3, 2, 4, 5, 1, 0),
      (4, 5, 3, 2, 0, 1),
      (5, 4, 1, 0, 2, 3)),
     ([0, 1, 2, 3, 4, 5],
      (1, 0, 5, 4, 3, 2),
      (2, 3, 4, 5, 0, 1),
      (3, 2, 1, 0, 5, 4),
      (4, 5, 0, 1, 2, 3),
      (5, 4, 3, 2, 1, 0)),
     ([0, 1, 2, 3, 4, 5],
      (1, 0, 5, 4, 3, 2),
      (2, 4, 0, 5, 1, 3),
      (3, 5, 4, 0, 2, 1),
      (4, 2, 3, 1, 5, 0),
      (5, 3, 1, 2, 0, 4)),
     ([0, 1, 2, 3, 4, 5],
      (1, 0, 5, 4, 3, 2),
      (2, 4, 3, 0, 5, 1),
      (3, 5, 0, 2, 1, 4),
      (4, 2, 1, 5, 0, 3),
      (5, 3, 4, 1, 2, 0)),
     ([0, 1, 2, 3, 4, 5],
      (1, 0, 5, 4, 3, 2),
      (2, 5, 3, 0, 1, 4),
      (3, 4, 0, 2, 5, 1),
      (4, 3, 1, 5, 2, 0),
      (5, 2, 4, 1, 0, 3)),
     ([0, 1, 2, 3, 4, 5],
      (1, 0, 5, 4, 3, 2),
      (2, 5, 3, 1, 0, 4),
      (3, 4, 1, 5, 2, 0),
      (4, 3, 0, 2, 5, 1),
      (5, 2, 4, 0, 1, 3)),
     ([0, 1, 2, 3, 4, 5],
      (1, 0, 5, 4, 3, 2),
      (2, 5, 4, 0, 1, 3),
      (3, 4, 0, 5, 2, 1),
      (4, 3, 1, 2, 5, 0),
      (5, 2, 3, 1, 0, 4)),
     ([0, 1, 2, 3, 4, 5],
      (1, 0, 5, 4, 3, 2),
      (2, 5, 4, 1, 0, 3),
      (3, 4, 1, 2, 5, 0),
      (4, 3, 0, 5, 2, 1),
      (5, 2, 3, 0, 1, 4)),
     ([0, 1, 2, 3, 4, 5],
      (1, 2, 0, 4, 5, 3),
      (2, 0, 1, 5, 3, 4),
      (3, 4, 5, 0, 1, 2),
      (4, 5, 3, 1, 2, 0),
      (5, 3, 4, 2, 0, 1)),
     ([0, 1, 2, 3, 4, 5],
      (1, 2, 0, 4, 5, 3),
      (2, 0, 1, 5, 3, 4),
      (3, 4, 5, 1, 2, 0),
      (4, 5, 3, 2, 0, 1),
      (5, 3, 4, 0, 1, 2)),
     ([0, 1, 2, 3, 4, 5],
      (1, 2, 0, 4, 5, 3),
      (2, 0, 1, 5, 3, 4),
      (3, 4, 5, 2, 0, 1),
      (4, 5, 3, 0, 1, 2),
      (5, 3, 4, 1, 2, 0)),
     ([0, 1, 2, 3, 4, 5],
      (1, 2, 0, 4, 5, 3),
      (2, 0, 1, 5, 3, 4),
      (3, 5, 4, 0, 2, 1),
      (4, 3, 5, 1, 0, 2),
      (5, 4, 3, 2, 1, 0)),
     ([0, 1, 2, 3, 4, 5],
      (1, 2, 0, 5, 3, 4),
      (2, 0, 1, 4, 5, 3),
      (3, 4, 5, 0, 1, 2),
      (4, 5, 3, 2, 0, 1),
      (5, 3, 4, 1, 2, 0)),
     ([0, 1, 2, 3, 4, 5],
      (1, 2, 0, 5, 3, 4),
      (2, 0, 1, 4, 5, 3),
      (3, 5, 4, 0, 2, 1),
      (4, 3, 5, 2, 1, 0),
      (5, 4, 3, 1, 0, 2)),
     ([0, 1, 2, 3, 4, 5],
      (1, 2, 0, 5, 3, 4),
      (2, 0, 1, 4, 5, 3),
      (3, 5, 4, 1, 0, 2),
      (4, 3, 5, 0, 2, 1),
      (5, 4, 3, 2, 1, 0)),
     ([0, 1, 2, 3, 4, 5],
      (1, 2, 0, 5, 3, 4),
      (2, 0, 1, 4, 5, 3),
      (3, 5, 4, 2, 1, 0),
      (4, 3, 5, 1, 0, 2),
      (5, 4, 3, 0, 2, 1)),
     ([0, 1, 2, 3, 4, 5],
      (1, 2, 3, 4, 5, 0),
      (2, 3, 4, 5, 0, 1),
      (3, 4, 5, 0, 1, 2),
      (4, 5, 0, 1, 2, 3),
      (5, 0, 1, 2, 3, 4)),
     ([0, 1, 2, 3, 4, 5],
      (1, 2, 3, 5, 0, 4),
      (2, 3, 5, 4, 1, 0),
      (3, 5, 4, 0, 2, 1),
      (4, 0, 1, 2, 5, 3),
      (5, 4, 0, 1, 3, 2)),
     ([0, 1, 2, 3, 4, 5],
      (1, 2, 4, 0, 5, 3),
      (2, 4, 5, 1, 3, 0),
      (3, 0, 1, 5, 2, 4),
      (4, 5, 3, 2, 0, 1),
      (5, 3, 0, 4, 1, 2)),
     ([0, 1, 2, 3, 4, 5],
      (1, 2, 4, 5, 3, 0),
      (2, 4, 3, 0, 5, 1),
      (3, 5, 0, 2, 1, 4),
      (4, 3, 5, 1, 0, 2),
      (5, 0, 1, 4, 2, 3)),
     ([0, 1, 2, 3, 4, 5],
      (1, 2, 5, 0, 3, 4),
      (2, 5, 4, 1, 0, 3),
      (3, 0, 1, 4, 5, 2),
      (4, 3, 0, 5, 2, 1),
      (5, 4, 3, 2, 1, 0)),
     ([0, 1, 2, 3, 4, 5],
      (1, 2, 5, 4, 0, 3),
      (2, 5, 3, 0, 1, 4),
      (3, 4, 0, 2, 5, 1),
      (4, 0, 1, 5, 3, 2),
      (5, 3, 4, 1, 2, 0)),
     ([0, 1, 2, 3, 4, 5],
      (1, 3, 0, 4, 5, 2),
      (2, 0, 5, 1, 3, 4),
      (3, 4, 1, 5, 2, 0),
      (4, 5, 3, 2, 0, 1),
      (5, 2, 4, 0, 1, 3)),
     ([0, 1, 2, 3, 4, 5],
      (1, 3, 0, 5, 2, 4),
      (2, 0, 4, 1, 5, 3),
      (3, 5, 1, 4, 0, 2),
      (4, 2, 5, 0, 3, 1),
      (5, 4, 3, 2, 1, 0)),
     ([0, 1, 2, 3, 4, 5],
      (1, 3, 4, 0, 5, 2),
      (2, 4, 0, 5, 1, 3),
      (3, 0, 5, 1, 2, 4),
      (4, 5, 1, 2, 3, 0),
      (5, 2, 3, 4, 0, 1)),
     ([0, 1, 2, 3, 4, 5],
      (1, 3, 4, 0, 5, 2),
      (2, 4, 1, 5, 3, 0),
      (3, 0, 5, 1, 2, 4),
      (4, 5, 3, 2, 0, 1),
      (5, 2, 0, 4, 1, 3)),
     ([0, 1, 2, 3, 4, 5],
      (1, 3, 4, 0, 5, 2),
      (2, 4, 3, 5, 0, 1),
      (3, 0, 5, 1, 2, 4),
      (4, 5, 0, 2, 1, 3),
      (5, 2, 1, 4, 3, 0)),
     ([0, 1, 2, 3, 4, 5],
      (1, 3, 4, 0, 5, 2),
      (2, 5, 0, 4, 3, 1),
      (3, 0, 5, 1, 2, 4),
      (4, 2, 1, 5, 0, 3),
      (5, 4, 3, 2, 1, 0)),
     ([0, 1, 2, 3, 4, 5],
      (1, 3, 4, 2, 5, 0),
      (2, 4, 0, 5, 1, 3),
      (3, 2, 5, 4, 0, 1),
      (4, 5, 1, 0, 3, 2),
      (5, 0, 3, 1, 2, 4)),
     ([0, 1, 2, 3, 4, 5],
      (1, 3, 4, 5, 0, 2),
      (2, 4, 3, 0, 5, 1),
      (3, 5, 0, 2, 1, 4),
      (4, 0, 5, 1, 2, 3),
      (5, 2, 1, 4, 3, 0)),
     ([0, 1, 2, 3, 4, 5],
      (1, 3, 5, 0, 2, 4),
      (2, 4, 0, 5, 1, 3),
      (3, 0, 4, 1, 5, 2),
      (4, 5, 3, 2, 0, 1),
      (5, 2, 1, 4, 3, 0)),
     ([0, 1, 2, 3, 4, 5],
      (1, 3, 5, 0, 2, 4),
      (2, 5, 0, 4, 3, 1),
      (3, 0, 4, 1, 5, 2),
      (4, 2, 3, 5, 1, 0),
      (5, 4, 1, 2, 0, 3)),
     ([0, 1, 2, 3, 4, 5],
      (1, 3, 5, 0, 2, 4),
      (2, 5, 1, 4, 0, 3),
      (3, 0, 4, 1, 5, 2),
      (4, 2, 0, 5, 3, 1),
      (5, 4, 3, 2, 1, 0)),
     ([0, 1, 2, 3, 4, 5],
      (1, 3, 5, 0, 2, 4),
      (2, 5, 3, 4, 1, 0),
      (3, 0, 4, 1, 5, 2),
      (4, 2, 1, 5, 0, 3),
      (5, 4, 0, 2, 3, 1)),
     ([0, 1, 2, 3, 4, 5],
      (1, 3, 5, 2, 0, 4),
      (2, 5, 0, 4, 3, 1),
      (3, 2, 4, 5, 1, 0),
      (4, 0, 3, 1, 5, 2),
      (5, 4, 1, 0, 2, 3)),
     ([0, 1, 2, 3, 4, 5],
      (1, 3, 5, 4, 2, 0),
      (2, 5, 3, 0, 1, 4),
      (3, 4, 0, 2, 5, 1),
      (4, 2, 1, 5, 0, 3),
      (5, 0, 4, 1, 3, 2)),
     ([0, 1, 2, 3, 4, 5],
      (1, 4, 0, 2, 5, 3),
      (2, 0, 3, 5, 1, 4),
      (3, 2, 5, 4, 0, 1),
      (4, 5, 1, 0, 3, 2),
      (5, 3, 4, 1, 2, 0)),
     ([0, 1, 2, 3, 4, 5],
      (1, 4, 0, 5, 3, 2),
      (2, 0, 5, 4, 1, 3),
      (3, 5, 4, 0, 2, 1),
      (4, 3, 1, 2, 5, 0),
      (5, 2, 3, 1, 0, 4)),
     ([0, 1, 2, 3, 4, 5],
      (1, 4, 3, 0, 5, 2),
      (2, 3, 4, 5, 0, 1),
      (3, 0, 5, 2, 1, 4),
      (4, 5, 0, 1, 2, 3),
      (5, 2, 1, 4, 3, 0)),
     ([0, 1, 2, 3, 4, 5],
      (1, 4, 3, 5, 0, 2),
      (2, 3, 0, 1, 5, 4),
      (3, 5, 1, 4, 2, 0),
      (4, 0, 5, 2, 1, 3),
      (5, 2, 4, 0, 3, 1)),
     ([0, 1, 2, 3, 4, 5],
      (1, 4, 3, 5, 0, 2),
      (2, 3, 1, 4, 5, 0),
      (3, 5, 4, 0, 2, 1),
      (4, 0, 5, 2, 1, 3),
      (5, 2, 0, 1, 3, 4)),
     ([0, 1, 2, 3, 4, 5],
      (1, 4, 3, 5, 0, 2),
      (2, 3, 4, 0, 5, 1),
      (3, 5, 0, 1, 2, 4),
      (4, 0, 5, 2, 1, 3),
      (5, 2, 1, 4, 3, 0)),
     ([0, 1, 2, 3, 4, 5],
      (1, 4, 3, 5, 0, 2),
      (2, 5, 0, 4, 3, 1),
      (3, 2, 1, 0, 5, 4),
      (4, 0, 5, 2, 1, 3),
      (5, 3, 4, 1, 2, 0)),
     ([0, 1, 2, 3, 4, 5],
      (1, 4, 3, 5, 2, 0),
      (2, 3, 0, 1, 5, 4),
      (3, 5, 1, 4, 0, 2),
      (4, 2, 5, 0, 3, 1),
      (5, 0, 4, 2, 1, 3)),
     ([0, 1, 2, 3, 4, 5],
      (1, 4, 5, 0, 2, 3),
      (2, 5, 0, 4, 3, 1),
      (3, 0, 4, 5, 1, 2),
      (4, 2, 3, 1, 5, 0),
      (5, 3, 1, 2, 0, 4)),
     ([0, 1, 2, 3, 4, 5],
      (1, 4, 5, 2, 0, 3),
      (2, 3, 0, 1, 5, 4),
      (3, 5, 4, 0, 2, 1),
      (4, 0, 3, 5, 1, 2),
      (5, 2, 1, 4, 3, 0)),
     ([0, 1, 2, 3, 4, 5],
      (1, 4, 5, 2, 0, 3),
      (2, 5, 0, 4, 3, 1),
      (3, 2, 4, 1, 5, 0),
      (4, 0, 3, 5, 1, 2),
      (5, 3, 1, 0, 2, 4)),
     ([0, 1, 2, 3, 4, 5],
      (1, 4, 5, 2, 0, 3),
      (2, 5, 1, 0, 3, 4),
      (3, 2, 0, 4, 5, 1),
      (4, 0, 3, 5, 1, 2),
      (5, 3, 4, 1, 2, 0)),
     ([0, 1, 2, 3, 4, 5],
      (1, 4, 5, 2, 0, 3),
      (2, 5, 4, 1, 3, 0),
      (3, 2, 1, 0, 5, 4),
      (4, 0, 3, 5, 1, 2),
      (5, 3, 0, 4, 2, 1)),
     ([0, 1, 2, 3, 4, 5],
      (1, 4, 5, 2, 3, 0),
      (2, 5, 4, 1, 0, 3),
      (3, 2, 1, 0, 5, 4),
      (4, 3, 0, 5, 2, 1),
      (5, 0, 3, 4, 1, 2)),
     ([0, 1, 2, 3, 4, 5],
      (1, 5, 0, 2, 3, 4),
      (2, 0, 3, 4, 5, 1),
      (3, 2, 4, 5, 1, 0),
      (4, 3, 5, 1, 0, 2),
      (5, 4, 1, 0, 2, 3)),
     ([0, 1, 2, 3, 4, 5],
      (1, 5, 0, 4, 2, 3),
      (2, 0, 4, 5, 3, 1),
      (3, 4, 5, 0, 1, 2),
      (4, 2, 3, 1, 5, 0),
      (5, 3, 1, 2, 0, 4)),
     ([0, 1, 2, 3, 4, 5],
      (1, 5, 3, 0, 2, 4),
      (2, 3, 5, 4, 1, 0),
      (3, 0, 4, 2, 5, 1),
      (4, 2, 1, 5, 0, 3),
      (5, 4, 0, 1, 3, 2)),
     ([0, 1, 2, 3, 4, 5],
      (1, 5, 3, 4, 0, 2),
      (2, 3, 0, 1, 5, 4),
      (3, 4, 1, 5, 2, 0),
      (4, 0, 5, 2, 3, 1),
      (5, 2, 4, 0, 1, 3)),
     ([0, 1, 2, 3, 4, 5],
      (1, 5, 3, 4, 2, 0),
      (2, 3, 0, 1, 5, 4),
      (3, 4, 1, 5, 0, 2),
      (4, 2, 5, 0, 1, 3),
      (5, 0, 4, 2, 3, 1)),
     ([0, 1, 2, 3, 4, 5],
      (1, 5, 3, 4, 2, 0),
      (2, 3, 1, 5, 0, 4),
      (3, 4, 5, 0, 1, 2),
      (4, 2, 0, 1, 5, 3),
      (5, 0, 4, 2, 3, 1)),
     ([0, 1, 2, 3, 4, 5],
      (1, 5, 3, 4, 2, 0),
      (2, 3, 5, 0, 1, 4),
      (3, 4, 0, 1, 5, 2),
      (4, 2, 1, 5, 0, 3),
      (5, 0, 4, 2, 3, 1)),
     ([0, 1, 2, 3, 4, 5],
      (1, 5, 3, 4, 2, 0),
      (2, 4, 0, 5, 1, 3),
      (3, 2, 1, 0, 5, 4),
      (4, 3, 5, 1, 0, 2),
      (5, 0, 4, 2, 3, 1)),
     ([0, 1, 2, 3, 4, 5],
      (1, 5, 4, 0, 3, 2),
      (2, 4, 0, 5, 1, 3),
      (3, 0, 5, 4, 2, 1),
      (4, 3, 1, 2, 5, 0),
      (5, 2, 3, 1, 0, 4)),
     ([0, 1, 2, 3, 4, 5],
      (1, 5, 4, 2, 0, 3),
      (2, 4, 5, 1, 3, 0),
      (3, 2, 1, 0, 5, 4),
      (4, 0, 3, 5, 2, 1),
      (5, 3, 0, 4, 1, 2)),
     ([0, 1, 2, 3, 4, 5],
      (1, 5, 4, 2, 3, 0),
      (2, 3, 0, 1, 5, 4),
      (3, 4, 5, 0, 1, 2),
      (4, 2, 1, 5, 0, 3),
      (5, 0, 3, 4, 2, 1)),
     ([0, 1, 2, 3, 4, 5],
      (1, 5, 4, 2, 3, 0),
      (2, 4, 0, 5, 1, 3),
      (3, 2, 5, 1, 0, 4),
      (4, 3, 1, 0, 5, 2),
      (5, 0, 3, 4, 2, 1)),
     ([0, 1, 2, 3, 4, 5],
      (1, 5, 4, 2, 3, 0),
      (2, 4, 1, 0, 5, 3),
      (3, 2, 0, 5, 1, 4),
      (4, 3, 5, 1, 0, 2),
      (5, 0, 3, 4, 2, 1)),
     ([0, 1, 2, 3, 4, 5],
      (1, 5, 4, 2, 3, 0),
      (2, 4, 5, 1, 0, 3),
      (3, 2, 1, 0, 5, 4),
      (4, 3, 0, 5, 1, 2),
      (5, 0, 3, 4, 2, 1))]
| 22.143451
| 22
| 0.271054
| 2,881
| 10,651
| 1.002083
| 0.00243
| 0.110842
| 0.103914
| 0.119155
| 0.963976
| 0.906824
| 0.892622
| 0.882923
| 0.751992
| 0.654312
| 0
| 0.418142
| 0.353112
| 10,651
| 481
| 23
| 22.143451
| 0.000871
| 0
| 0
| 0.744283
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
ba42e8af4dfd2e7e71e4159ad4ae88d49cdc1f19
| 4,667
|
py
|
Python
|
survae/transforms/bijections/actnorm.py
|
alisiahkoohi/survae_flows
|
e1747b05524c7ab540a211ed360ab3e67bc3e96d
|
[
"MIT"
] | 262
|
2020-07-05T20:57:44.000Z
|
2022-03-28T02:24:43.000Z
|
survae/transforms/bijections/actnorm.py
|
alisiahkoohi/survae_flows
|
e1747b05524c7ab540a211ed360ab3e67bc3e96d
|
[
"MIT"
] | 17
|
2020-08-15T05:43:34.000Z
|
2022-01-31T12:24:21.000Z
|
survae/transforms/bijections/actnorm.py
|
alisiahkoohi/survae_flows
|
e1747b05524c7ab540a211ed360ab3e67bc3e96d
|
[
"MIT"
] | 35
|
2020-08-24T06:55:37.000Z
|
2022-02-11T05:17:58.000Z
|
import torch
import torch.nn as nn
from survae.transforms.bijections import Bijection
class _ActNormBijection(Bijection):
    '''
    Base class for activation normalization [1].

    Subclasses fix the parameter shapes and reduction axes by implementing
    `register_params`, `compute_stats` and `ldj_multiplier`.

    References:
        [1] Glow: Generative Flow with Invertible 1×1 Convolutions,
            Kingma & Dhariwal, 2018, https://arxiv.org/abs/1807.03039
    '''
    def __init__(self, num_features, data_dep_init=True, eps=1e-6):
        # num_features: size of the normalized feature axis.
        # data_dep_init: if True, shift/log_scale are filled from the stats
        #   of the first training batch (see `data_init`).
        # eps: numerical floor added to the std before taking the log.
        super(_ActNormBijection, self).__init__()
        self.num_features = num_features
        self.data_dep_init = data_dep_init
        self.eps = eps
        # Buffer (not a Parameter) so the init flag travels with state_dict;
        # starts at 0 only when data-dependent init is requested.
        self.register_buffer('initialized', torch.zeros(1) if data_dep_init else torch.ones(1))
        self.register_params()

    def data_init(self, x):
        """One-shot init of shift/log_scale from the statistics of batch *x*."""
        self.initialized += 1.
        with torch.no_grad():
            x_mean, x_std = self.compute_stats(x)
            self.shift.data = x_mean
            self.log_scale.data = torch.log(x_std + self.eps)

    def forward(self, x):
        """Normalize *x*; return (z, ldj) with ldj the per-example log|det J|."""
        # Lazily run the data-dependent init on the first training batch.
        if self.training and not self.initialized: self.data_init(x)
        z = (x - self.shift) * torch.exp(-self.log_scale)
        # -sum(log_scale), counted once per position (ldj_multiplier) and
        # broadcast over the batch dimension.
        ldj = torch.sum(-self.log_scale).expand([x.shape[0]]) * self.ldj_multiplier(x)
        return z, ldj

    def inverse(self, z):
        """Undo `forward`: map normalized *z* back to the input space."""
        return self.shift + z * torch.exp(self.log_scale)

    def register_params(self):
        '''Register parameters shift and log_scale'''
        raise NotImplementedError()

    def compute_stats(self, x):
        '''Compute x_mean and x_std'''
        raise NotImplementedError()

    def ldj_multiplier(self, x):
        '''Multiplier for ldj'''
        raise NotImplementedError()
class ActNormBijection(_ActNormBijection):
    '''
    Activation normalization [1] for inputs on the form (B,D).

    The shift and scale are filled from the mean/std of the first mini-batch
    and are trainable parameters afterwards.

    References:
        [1] Glow: Generative Flow with Invertible 1×1 Convolutions,
            Kingma & Dhariwal, 2018, https://arxiv.org/abs/1807.03039
    '''
    def register_params(self):
        '''Register parameters shift and log_scale'''
        param_shape = (1, self.num_features)
        for name in ('shift', 'log_scale'):
            self.register_parameter(name, nn.Parameter(torch.zeros(*param_shape)))

    def compute_stats(self, x):
        '''Compute x_mean and x_std'''
        return x.mean(dim=0, keepdim=True), x.std(dim=0, keepdim=True)

    def ldj_multiplier(self, x):
        '''Multiplier for ldj'''
        return 1
class ActNormBijection1d(_ActNormBijection):
    '''
    Activation normalization [1] for inputs on the form (B,C,L).

    The shift and scale are filled from the mean/std of the first mini-batch
    and are trainable parameters afterwards.

    References:
        [1] Glow: Generative Flow with Invertible 1×1 Convolutions,
            Kingma & Dhariwal, 2018, https://arxiv.org/abs/1807.03039
    '''
    def register_params(self):
        '''Register parameters shift and log_scale'''
        param_shape = (1, self.num_features, 1)
        for name in ('shift', 'log_scale'):
            self.register_parameter(name, nn.Parameter(torch.zeros(*param_shape)))

    def compute_stats(self, x):
        '''Compute x_mean and x_std'''
        reduce_dims = [0, 2]
        return x.mean(dim=reduce_dims, keepdim=True), x.std(dim=reduce_dims, keepdim=True)

    def ldj_multiplier(self, x):
        '''Multiplier for ldj'''
        return x.shape[2]
class ActNormBijection2d(_ActNormBijection):
    '''
    Activation normalization [1] for inputs on the form (B,C,H,W).

    The shift and scale are filled from the mean/std of the first mini-batch
    and are trainable parameters afterwards.

    References:
        [1] Glow: Generative Flow with Invertible 1×1 Convolutions,
            Kingma & Dhariwal, 2018, https://arxiv.org/abs/1807.03039
    '''
    def register_params(self):
        '''Register parameters shift and log_scale'''
        param_shape = (1, self.num_features, 1, 1)
        for name in ('shift', 'log_scale'):
            self.register_parameter(name, nn.Parameter(torch.zeros(*param_shape)))

    def compute_stats(self, x):
        '''Compute x_mean and x_std'''
        reduce_dims = [0, 2, 3]
        return x.mean(dim=reduce_dims, keepdim=True), x.std(dim=reduce_dims, keepdim=True)

    def ldj_multiplier(self, x):
        '''Multiplier for ldj'''
        return x.shape[2:4].numel()
| 35.356061
| 99
| 0.650739
| 647
| 4,667
| 4.561051
| 0.170015
| 0.048797
| 0.040664
| 0.042697
| 0.728905
| 0.715351
| 0.715351
| 0.714673
| 0.702135
| 0.692647
| 0
| 0.029067
| 0.23334
| 4,667
| 131
| 100
| 35.625954
| 0.794578
| 0.336405
| 0
| 0.3
| 0
| 0
| 0.018251
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.266667
| false
| 0
| 0.05
| 0.016667
| 0.516667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
79ef4928fc936591bcfb30eaf80360990a0196fb
| 2,472
|
py
|
Python
|
disputatio/routines/vanes/eval_slope.py
|
nicholasmalaya/paleologos
|
11959056caa80d3c910759b714a0f8e42f986f0f
|
[
"MIT"
] | 1
|
2021-11-04T17:49:42.000Z
|
2021-11-04T17:49:42.000Z
|
disputatio/routines/vanes/eval_slope.py
|
nicholasmalaya/paleologos
|
11959056caa80d3c910759b714a0f8e42f986f0f
|
[
"MIT"
] | null | null | null |
disputatio/routines/vanes/eval_slope.py
|
nicholasmalaya/paleologos
|
11959056caa80d3c910759b714a0f8e42f986f0f
|
[
"MIT"
] | 2
|
2019-01-04T16:08:18.000Z
|
2019-12-16T19:34:24.000Z
|
import numpy as np
# Fitted polynomial coefficients: _SLOPE_COEFFS[i][j] multiplies x**i * y**j.
_SLOPE_COEFFS = [
    [25.726409509885706, -7.959002774480191, -15.508167202030295,
     2.965402639113657, 2.908979127634638, -0.472590823195663],
    [34.433271501999322, -5.272449164192513, -22.326487602085692,
     2.102507411190583, 3.914198314742165, -0.414792906597238],
    [18.642522726102548, -0.472154879795198, -12.254352889020941,
     0.184571173718402, 1.987885398263920, -0.133536246179303],
    [4.998330147251158, 0.405313228278152, -3.237761593593429,
     -0.173496672310084, 0.478992888127426, -0.019284794711885],
    [0.658962676868528, 0.112796968137289, -0.414470445248186,
     -0.047912734297560, 0.054948414261503, -0.001201763731608],
    [0.034118218498095, 0.008461273235451, -0.020661658530202,
     -0.003578246014591, 0.002403161687857, -0.000024490557266],
]


def slope_func(x, y):
    """Evaluate the fitted degree-5 bivariate slope polynomial at (x, y).

    Fixes vs. the original:
    - Python 2 ``print tmp`` statements (a syntax error under Python 3) are
      now ``print(tmp)`` calls.
    - The 36 hand-written accumulation terms are replaced by a table-driven
      loop that adds the terms in the same order (x-power major, y-power
      minor), so the floating-point result is unchanged.

    Parameters: x, y -- evaluation point.
    Returns: sum over i, j of _SLOPE_COEFFS[i][j] * x**i * y**j.
    """
    tmp = 0.0
    for i, row in enumerate(_SLOPE_COEFFS):
        for j, coeff in enumerate(row):
            tmp += coeff * x**i * y**j
            # Preserved from the original: emit the running sum per term.
            print(tmp)
    return tmp
def main():
    """Evaluate slope_func at a fixed sample point and print the flow angle.

    Fixes vs. the original:
    - Python 2 ``print`` statements converted to Python 3 calls.
    - The angle of a line whose slope is m is arctan(m); the original called
      np.tan(m), which treats the slope as if it were already an angle, at
      odds with the degree conversion and cos/sin direction components below.
    """
    x = -5.5
    y = -3.5
    m = slope_func(x, y)
    print('x,y: ', x, y, m)
    theta = np.arctan(m)  # angle whose tangent is the slope m
    print('theta =', theta*180/np.pi)
    dx = np.cos(theta)
    dy = np.sin(theta)
    print('dx: ', dx)
    print('dy: ', dy)
    # NOTE(review): this prints the coordinate ratio y/x under the label
    # 'dy/dx' — confirm whether dy/dx of the direction vector was intended.
    print('dy/dx: ', y/x)
#
# execute
#
main()
| 22.888889
| 44
| 0.533981
| 393
| 2,472
| 3.35369
| 0.19084
| 0.218513
| 0.292109
| 0.191199
| 0.362671
| 0.24431
| 0
| 0
| 0
| 0
| 0
| 0.382794
| 0.299353
| 2,472
| 107
| 45
| 23.102804
| 0.378176
| 0.01335
| 0
| 0.404494
| 0
| 0
| 0.011116
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.011236
| null | null | 0.460674
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
0300f144fa364f106cb253231f92cba44477f2d7
| 89
|
py
|
Python
|
Task2/my_packages/Transformers/StringTransformer.py
|
AbdelrahmanKandil/assignment_6
|
a2f6311c136da6e1a53ab30c3a2aa9f3d3e83436
|
[
"MIT"
] | null | null | null |
Task2/my_packages/Transformers/StringTransformer.py
|
AbdelrahmanKandil/assignment_6
|
a2f6311c136da6e1a53ab30c3a2aa9f3d3e83436
|
[
"MIT"
] | null | null | null |
Task2/my_packages/Transformers/StringTransformer.py
|
AbdelrahmanKandil/assignment_6
|
a2f6311c136da6e1a53ab30c3a2aa9f3d3e83436
|
[
"MIT"
] | null | null | null |
def reverse(txt):
    """Return the characters of *txt* in reverse order."""
    return ''.join(reversed(txt))
def capitalize(txt):
    """Return *txt* with its first character capitalized and the rest lowered."""
    capitalized = txt.capitalize()
    return capitalized
| 14.833333
| 27
| 0.651685
| 12
| 89
| 4.833333
| 0.5
| 0.310345
| 0.413793
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013889
| 0.191011
| 89
| 6
| 27
| 14.833333
| 0.791667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
cefb97024d5ba37345e06f64fb7538b1dc50dc6a
| 1,702
|
py
|
Python
|
tests/test_string.py
|
CastixGitHub/i3-workspace-names-daemon
|
4a7d2d68629c37b86f6c2adcc1c35a54dc0c60d8
|
[
"MIT"
] | null | null | null |
tests/test_string.py
|
CastixGitHub/i3-workspace-names-daemon
|
4a7d2d68629c37b86f6c2adcc1c35a54dc0c60d8
|
[
"MIT"
] | 1
|
2020-02-23T11:15:17.000Z
|
2020-02-23T11:15:17.000Z
|
tests/test_string.py
|
CastixGitHub/i3-workspace-names-daemon
|
4a7d2d68629c37b86f6c2adcc1c35a54dc0c60d8
|
[
"MIT"
] | null | null | null |
import unittest
from i3_workspace_names_daemon import truncate, compress
class TestString(unittest.TestCase):
def test_compress_dash(self):
original = "i3-workspace-names-daemon"
expected = "i3-wor-nam-dae"
actual = compress(original)
self.assertEqual(expected, actual)
def test_compress_unserscore(self):
original = "i3_workspace_names_daemon"
expected = "i3_wor_nam_dae"
actual = compress(original)
self.assertEqual(expected, actual)
def test_compress_mixed(self):
original = "i3-workspace_names_daemon"
expected = "i3-wor_nam_dae"
actual = compress(original)
self.assertEqual(expected, actual)
def test_compress_trailing(self):
original = "i3-workspace-names-daemon_"
expected = "i3-wor-nam-dae"
actual = compress(original)
self.assertEqual(expected, actual)
def test_compress_trailing_double(self):
original = "i3-workspace-names-daemon__"
expected = "i3-wor-nam-dae_"
actual = compress(original)
self.assertEqual(expected, actual)
def test_compress_leading(self):
original = "_i3-workspace-names-daemon"
expected = "i3-wor-nam-dae"
actual = compress(original)
self.assertEqual(expected, actual)
def test_compress_short(self):
original = "a-b-c-d"
expected = "a-b-c-d"
actual = compress(original)
self.assertEqual(expected, actual)
def test_compress_dash_double(self):
original = "i3-workspace--names-daemon"
expected = "i3-wor-nam-dae"
actual = compress(original)
self.assertEqual(expected, actual)
| 31.518519
| 56
| 0.657462
| 192
| 1,702
| 5.640625
| 0.171875
| 0.081256
| 0.11819
| 0.162512
| 0.842105
| 0.842105
| 0.842105
| 0.842105
| 0.842105
| 0.842105
| 0
| 0.011619
| 0.241481
| 1,702
| 53
| 57
| 32.113208
| 0.827266
| 0
| 0
| 0.465116
| 0
| 0
| 0.17215
| 0.105758
| 0
| 0
| 0
| 0
| 0.186047
| 1
| 0.186047
| false
| 0
| 0.046512
| 0
| 0.255814
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cefe76a7442130e715f26388cb7f6d6efc6402df
| 57
|
py
|
Python
|
karlovic/forms/__init__.py
|
Aiwizo/karlovic
|
94f8f2a5adc436fb662bfe9b8ad5211be0833baa
|
[
"Apache-2.0"
] | null | null | null |
karlovic/forms/__init__.py
|
Aiwizo/karlovic
|
94f8f2a5adc436fb662bfe9b8ad5211be0833baa
|
[
"Apache-2.0"
] | 3
|
2020-10-28T12:43:18.000Z
|
2020-12-15T15:38:16.000Z
|
karlovic/forms/__init__.py
|
Aiwizo/karlovic
|
94f8f2a5adc436fb662bfe9b8ad5211be0833baa
|
[
"Apache-2.0"
] | null | null | null |
from karlovic.forms.use_image_form import use_image_form
| 28.5
| 56
| 0.894737
| 10
| 57
| 4.7
| 0.7
| 0.340426
| 0.510638
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070175
| 57
| 1
| 57
| 57
| 0.886792
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
06359ebcf7c6fe0e1db0cedcbabcaa8ec0f47912
| 189
|
py
|
Python
|
tests/test_imp.py
|
assasinitachi123/extra-boolean
|
632bf238bd44df0a31be8fb970ed65f812dad166
|
[
"MIT"
] | null | null | null |
tests/test_imp.py
|
assasinitachi123/extra-boolean
|
632bf238bd44df0a31be8fb970ed65f812dad166
|
[
"MIT"
] | 2
|
2021-04-01T12:07:06.000Z
|
2021-04-01T12:30:17.000Z
|
tests/test_imp.py
|
assasinitachi123/extra-boolean
|
632bf238bd44df0a31be8fb970ed65f812dad166
|
[
"MIT"
] | 3
|
2021-04-01T11:29:06.000Z
|
2021-04-01T12:07:55.000Z
|
from extra_boolean import imp
def test_imp():
assert imp(True, True) == True
assert imp(False, True) == True
assert imp(False, False) == True
assert imp(True, False) == False
| 21
| 35
| 0.666667
| 28
| 189
| 4.428571
| 0.357143
| 0.290323
| 0.314516
| 0.274194
| 0.354839
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.21164
| 189
| 8
| 36
| 23.625
| 0.832215
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.666667
| 1
| 0.166667
| true
| 0
| 0.166667
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
069267166d8f8b1bd2db17d2c743a1f0c06580bf
| 7,308
|
py
|
Python
|
cifar10_models/Tiled_layers.py
|
rayjyh/PyTorch_CIFAR10_rram_compenstion
|
61a53c85a74abc965a5bea3e4b102e9a7ad8f03a
|
[
"MIT"
] | null | null | null |
cifar10_models/Tiled_layers.py
|
rayjyh/PyTorch_CIFAR10_rram_compenstion
|
61a53c85a74abc965a5bea3e4b102e9a7ad8f03a
|
[
"MIT"
] | null | null | null |
cifar10_models/Tiled_layers.py
|
rayjyh/PyTorch_CIFAR10_rram_compenstion
|
61a53c85a74abc965a5bea3e4b102e9a7ad8f03a
|
[
"MIT"
] | null | null | null |
import torch
from torch import Tensor
import torch.nn as nn
from torch.nn.parameter import Parameter
import torch.nn.init as init
import torch.nn.functional as F
import math
# Public API: names exported via `from <module> import *`.
__all__ = [
    "TiledConv2D",
    "TiledLinear"
]
class TiledConv2D(nn.Module):
    """
    2D convolution layer with weight tiling for crossbar operation and DAC/ADC quantization
    For implementation details, please see tiling.pdf
    """
    def __init__(
        self,
        in_channels: int,
        out_channels: int,
        kernel_size: int,
        stride: int = 1,
        padding: int = 1,
        bias: bool = True,
        xbar_dim: int = 64,
        adc_low: float = -0.1,
        adc_high: float = 0.1,
        width: int = 8,
        device=None,
        dtype=None
    ) -> None:
        # xbar_dim: rows of one crossbar tile; the flattened kernel
        #   (in_channels * kernel_size**2 inputs) is split across
        #   ceil(... / xbar_dim) tiles.
        # adc_low / adc_high: ADC clipping range; width: ADC bit width.
        # NOTE(review): padding defaults to 1, unlike nn.Conv2d's 0 —
        # confirm callers expect this.
        factory_kwargs = {'device': device, 'dtype': dtype}
        super(TiledConv2D, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.kernel_size = kernel_size
        self.stride = stride
        self.padding = padding
        self.weight = Parameter(torch.empty(
            (out_channels, in_channels, kernel_size, kernel_size),
            **factory_kwargs))
        # Weights are frozen — this layer models inference on fixed devices.
        self.weight.requires_grad = False
        if bias:
            self.bias = Parameter(torch.empty(out_channels, **factory_kwargs))
        else:
            self.register_parameter('bias', None)
        self.xbar_dim = xbar_dim
        self.adc_low = adc_low
        self.adc_high = adc_high
        self.width = width
        self.device = device
        self.reset_parameters()
        # One full weight copy per crossbar tile, flattened to
        # (num_xbar_per_row, out_channels, in_channels * kernel_size**2)
        # so each tile's row slice can be masked in repeat_weights().
        self.num_xbar_per_row = math.ceil((in_channels * kernel_size ** 2) / xbar_dim)
        self.tiled_weight = self.weight.repeat(self.num_xbar_per_row, 1, 1, 1, 1).to(device)
        self.tiled_weight.requires_grad = False
        self.tiled_weight.resize_(self.num_xbar_per_row, self.out_channels, in_channels*kernel_size**2)
        # repeat weights
        self.repeat_weights()

    def reset_parameters(self) -> None:
        # Same initialization scheme as torch.nn.Conv2d.
        init.kaiming_uniform_(self.weight, a=math.sqrt(5))
        if self.bias is not None:
            fan_in, _ = init._calculate_fan_in_and_fan_out(self.weight)
            bound = 1 / math.sqrt(fan_in)
            init.uniform_(self.bias, -bound, bound)

    def repeat_weights(self):
        # as many weight copy as the number of crossbars in one column
        # for each weight copy, only the corresponding positions of the associated crossbar have valid weights
        # this is how to get the partial sum
        mask = torch.zeros_like(self.tiled_weight)
        for i in range(self.num_xbar_per_row-1):
            mask[i, :, i*self.xbar_dim:(i+1)*self.xbar_dim] = 1
        # The last tile keeps whatever remains (may be < xbar_dim rows).
        mask[self.num_xbar_per_row-1, :, (self.num_xbar_per_row-1)*self.xbar_dim:] = 1
        self.tiled_weight *= mask
        # Restore the conv-kernel layout expected by F.conv2d.
        self.tiled_weight.resize_(self.num_xbar_per_row, self.out_channels, self.in_channels,
                                  self.kernel_size, self.kernel_size)

    def adc(self, input):
        """Quantize one scalar to the ADC grid, clipping to [adc_low, adc_high]."""
        step = (self.adc_high - self.adc_low) / (2 ** self.width)
        if input > self.adc_high:
            return self.adc_high
        elif input < self.adc_low:
            return self.adc_low
        else:
            return step * round((input - self.adc_low) / step) + self.adc_low

    def forward(self, input: Tensor):
        # TODO: add DAC quantization here
        _output = []
        # do convolution with all weight copies for partial sum
        for i in range(self.num_xbar_per_row):
            _output.append(F.conv2d(input, self.tiled_weight[i], self.bias,
                                    self.stride, self.padding))
        output = torch.stack(_output, dim=2)
        # ADC quantization before adding up partial sums
        # NOTE(review): Tensor.apply_ is CPU-only and rejects tensors that
        # require grad; TiledLinear.forward moves to CPU and detaches first —
        # confirm whether the same is needed here (bias is trainable).
        # NOTE(review): each F.conv2d call added self.bias, so the sum below
        # counts the bias num_xbar_per_row times — confirm intended.
        output.apply_(self.adc)
        output = torch.sum(output, dim=2).to(self.device)
        return output
class TiledLinear(nn.Module):
    """
    Linear layer with weight tiling for crossbar operation and DAC/ADC quantization
    For implementation details, please see tiling.pdf
    """
    def __init__(
        self,
        in_features: int,
        out_features: int,
        bias: bool = True,
        xbar_dim: int = 64,
        adc_high: float = 0.1,
        adc_low: float = -0.1,
        width: int = 8,
        device=None,
        dtype=None
    ) -> None:
        # xbar_dim: rows of one crossbar tile; in_features is split across
        #   ceil(in_features / xbar_dim) tiles.
        # adc_low / adc_high: ADC clipping range; width: ADC bit width.
        factory_kwargs = {'device': device, 'dtype': dtype}
        super(TiledLinear, self).__init__()
        self.in_features = in_features
        self.out_features = out_features
        self.weight = Parameter(torch.empty(
            (out_features, in_features), **factory_kwargs))
        # Weights are frozen — this layer models inference on fixed devices.
        self.weight.requires_grad = False
        if bias:
            self.bias = Parameter(torch.empty(out_features, **factory_kwargs))
        else:
            self.register_parameter('bias', None)
        self.xbar_dim = xbar_dim
        self.adc_low = adc_low
        self.adc_high = adc_high
        self.width = width
        self.device = device
        self.reset_parameters()
        # One full weight copy per tile:
        # (num_xbar_per_row, out_features, in_features).
        self.num_xbar_per_row = math.ceil(in_features / xbar_dim)
        self.tiled_weight = self.weight.repeat(self.num_xbar_per_row, 1, 1).to(device)
        self.tiled_weight.requires_grad = False
        # repeat weights
        self.repeat_weights()

    def reset_parameters(self) -> None:
        # Same initialization scheme as torch.nn.Linear.
        init.kaiming_uniform_(self.weight, a=math.sqrt(5))
        if self.bias is not None:
            fan_in, _ = init._calculate_fan_in_and_fan_out(self.weight)
            bound = 1 / math.sqrt(fan_in)
            init.uniform_(self.bias, -bound, bound)

    def repeat_weights(self):
        # as many weight copy as the number of crossbars in one column
        # for each weight copy, only the corresponding positions of the associated crossbar have valid weights
        # this is how to get the partial sum
        mask = torch.zeros_like(self.tiled_weight)
        for i in range(self.num_xbar_per_row-1):
            mask[i, :, i*self.xbar_dim:(i+1)*self.xbar_dim] = 1
        # The last tile keeps whatever remains (may be < xbar_dim rows).
        mask[self.num_xbar_per_row-1, :, (self.num_xbar_per_row-1)*self.xbar_dim:] = 1
        self.tiled_weight *= mask

    def adc(self, input):
        """Quantize one scalar to the ADC grid, clipping to [adc_low, adc_high]."""
        step = (self.adc_high - self.adc_low) / (2 ** self.width)
        if input > self.adc_high:
            return self.adc_high
        elif input < self.adc_low:
            return self.adc_low
        else:
            return step * round((input - self.adc_low) / step) + self.adc_low

    def forward(self, input: Tensor):
        # TODO: add DAC quantization here
        _output = []
        # do convolution with all weight copies for partial sum
        for i in range(self.num_xbar_per_row):
            _output.append(F.linear(input, self.tiled_weight[i], self.bias))
        # Moved to CPU because Tensor.apply_ is CPU-only; detach() below
        # yields a view sharing storage, so apply_ still mutates `output`.
        output = torch.stack(_output, dim=1).to('cpu')
        # ADC quantization before adding up partial sums
        # NOTE(review): each F.linear call added self.bias, so the sum below
        # counts the bias num_xbar_per_row times — confirm intended.
        output.detach().apply_(self.adc)
        output = torch.sum(output, dim=1).to(self.device)
        return output
'''
a = torch.randn(5, 10).to("cuda")
m_l = TiledLinear(10, 20, xbar_dim=16, device='cuda')
output_l = m_l(a)
'''
| 37.670103
| 110
| 0.6211
| 991
| 7,308
| 4.355197
| 0.152371
| 0.035681
| 0.037071
| 0.048193
| 0.8443
| 0.797961
| 0.779194
| 0.745829
| 0.7076
| 0.7076
| 0
| 0.011407
| 0.280241
| 7,308
| 194
| 111
| 37.670103
| 0.809125
| 0.156678
| 0
| 0.657534
| 0
| 0
| 0.009217
| 0
| 0
| 0
| 0
| 0.010309
| 0
| 1
| 0.068493
| false
| 0
| 0.047945
| 0
| 0.184932
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2345ec698176688d7d131a43349b4e0bcd2a7834
| 2,282
|
py
|
Python
|
scripts/res.py
|
KonstantinosChatziantoniou/GraphTrianglesCounting
|
1a35f8ac8756a72dc7919def6069a687743d25d7
|
[
"MIT"
] | 1
|
2020-09-19T20:06:52.000Z
|
2020-09-19T20:06:52.000Z
|
scripts/res.py
|
KonstantinosChatziantoniou/GraphTrianglesCounting
|
1a35f8ac8756a72dc7919def6069a687743d25d7
|
[
"MIT"
] | null | null | null |
scripts/res.py
|
KonstantinosChatziantoniou/GraphTrianglesCounting
|
1a35f8ac8756a72dc7919def6069a687743d25d7
|
[
"MIT"
] | null | null | null |
import pandas
import matplotlib.pyplot as plt

datasets = ['auto.mtx', 'delaunay_n22.mtx', 'great-britain_osm.mtx']
paths = ['cuda', 'serial_cilk']


def _collect_stats(aggregate):
    """Build the timing-stats dict, reducing each <dataset><path>.csv with
    *aggregate* ('mean' or 'min').

    CSV column layout: 0 = read time, 1 = csr time,
    2 = mem time (cuda) / serial time, 3 = exec time (cuda) / cilk time.
    """
    stats = {
        'cuda': [],
        'serial': [],
        'cilk': [],
        # reference numbers measured elsewhere (matlab, GTX 960), hard-coded
        'matlab': [3.3, 4.43, 1.6],
        'cuda960': [0.07 + 0.08, 0.1 + 0.39, 0.1 + 0.47],
    }
    # cuda results: memory-transfer time + kernel execution time
    for d in datasets:
        df = getattr(pandas.read_csv(d + paths[0] + '.csv', header=None), aggregate)()
        stats['cuda'].append(df[2] + df[3])
    # serial/cilk results share one file: column 2 = serial, column 3 = cilk
    for d in datasets:
        df = getattr(pandas.read_csv(d + paths[1] + '.csv', header=None), aggregate)()
        stats['serial'].append(df[2])
        stats['cilk'].append(df[3])
    return stats


def _report(stats, title):
    """Print the stats and show a grouped bar plot of execution times."""
    print(stats)
    df = pandas.DataFrame({'cuda': stats['cuda'], 'serial': stats['serial'],
                           'cilk': stats['cilk'], 'cuda960': stats['cuda960'],
                           'matlab': stats['matlab']}, index=datasets)
    a = df.plot.bar(rot=0, title=title)
    a.set_xlabel('dataset')
    a.set_ylabel('sec')
    print(df)
    plt.show()


_report(_collect_stats('mean'), 'Mean execution times')
############## FOR MIN ############
_report(_collect_stats('min'), 'Min execution times')
| 24.276596
| 164
| 0.54908
| 351
| 2,282
| 3.492877
| 0.182336
| 0.078303
| 0.009788
| 0.045677
| 0.903752
| 0.903752
| 0.903752
| 0.903752
| 0.903752
| 0.903752
| 0
| 0.054951
| 0.234443
| 2,282
| 93
| 165
| 24.537634
| 0.646823
| 0.022787
| 0
| 0.911765
| 0
| 0
| 0.147929
| 0.009558
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.029412
| 0
| 0.029412
| 0.058824
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2357036edd24e8c52240af2594dd868a214a3847
| 16,788
|
py
|
Python
|
src/train.py
|
zihangJiang/Adaptive-Attention
|
45eeb8fd629a81eebb3c8a8b869551f4f8738325
|
[
"Apache-2.0"
] | 22
|
2021-04-06T11:54:50.000Z
|
2022-03-18T03:27:31.000Z
|
src/train.py
|
zihangJiang/Adaptive-Attention
|
45eeb8fd629a81eebb3c8a8b869551f4f8738325
|
[
"Apache-2.0"
] | 1
|
2021-06-01T15:26:44.000Z
|
2021-06-01T17:21:02.000Z
|
src/train.py
|
zihangJiang/Adaptive-Attention
|
45eeb8fd629a81eebb3c8a8b869551f4f8738325
|
[
"Apache-2.0"
] | 1
|
2021-06-29T06:07:16.000Z
|
2021-06-29T06:07:16.000Z
|
# coding=utf-8
from cfr_loss import cfr_loss as loss_fn, ensembled_loss as en_loss_fn, proto_loss as p_loss, relation_loss as r_loss
from initializer import init_crfnet,init_protonet, init_log_file, init_seed, init_dataloader, init_optim, init_lr_scheduler,init_relationnet
from parser import get_parser
from tqdm import tqdm
import numpy as np
import torch
import os
# Default CUDA device, used as map_location for checkpoints and as the
# training device when --cuda is enabled.
default_device = 'cuda:0'
def train(opt, tr_dataloader, model, optim, lr_scheduler, val_dataloader=None, logger = None):
    '''
    Train the model with the reweighting algorithm.

    Runs opt.epochs epochs of episodic training.  Each batch is split into a
    support set `s` (first classes_per_it_tr * num_support_tr samples) and a
    query set `x`; the ensembled loss combines the base and attention heads,
    optionally adding permutation and inter-class terms.  After each epoch the
    model is evaluated on val_dataloader (if given) and checkpoints are saved
    under opt.experiment_root ('best_model.pth', per-epoch 'best_model{e}.pth'
    on improvement, and 'last_model.pth').

    Returns (best_state, best_acc, train_loss, train_acc, val_loss, val_acc).
    '''
    # NOTE(review): `device` is computed but only y.cuda() is used below —
    # x and s are presumably moved by DataParallel scatter; confirm.
    device = default_device if torch.cuda.is_available() and opt.cuda else 'cpu'
    if val_dataloader is None:
        best_state = None
    train_loss = []
    train_perm_loss = []
    train_acc = []
    train_inter_class_loss = []
    train_perm_acc = []
    val_loss = []
    val_acc = []
    best_acc = 0
    best_model_path = os.path.join(opt.experiment_root, 'best_model.pth')
    last_model_path = os.path.join(opt.experiment_root, 'last_model.pth')
    for epoch in range(opt.epochs):
        logger.info('=== Epoch: {} ==='.format(epoch))
        if opt.switch:
            # switch both datasets to 224x224 inputs for this run
            tr_dataloader.dataset.switch_image_size(224)
            val_dataloader.dataset.switch_image_size(224)
        tr_iter = iter(tr_dataloader)
        model.train()
        for batch in tqdm(tr_iter):
            optim.zero_grad()
            x, y = batch
            # support set: first classes_per_it_tr * num_support_tr samples
            s = x[:opt.classes_per_it_tr*opt.num_support_tr]
            # commit here to allow standard input combined into the query batch while training
            x = x[opt.classes_per_it_tr*opt.num_support_tr:]
            x, y, s = x, y.cuda(), s
            # replicate the support set once per GPU listed in opt.gpu so each
            # DataParallel replica receives the full support set
            s = s.repeat([len([int(gpu_id) for gpu_id in opt.gpu if gpu_id.isdigit()]),1,1,1])
            model_output, perm_output, atten_output, perm_atten_output, weight, perm_weight = model(x, s)
            loss, acc , perm_loss, perm_acc, inter_class_loss, ensemble_loss \
                = en_loss_fn(model_output,perm_output, perm_weight, weight, y,\
                            class_per_it=opt.classes_per_it_tr ,num_support = opt.num_support_tr)
            atten_loss, atten_acc , atten_perm_loss, atten_perm_acc, atten_inter_class_loss, atten_ensemble_loss \
                = en_loss_fn(atten_output,perm_atten_output, perm_weight, weight, y,\
                            class_per_it=opt.classes_per_it_tr ,num_support = opt.num_support_tr)
            loss = loss + atten_loss
            if opt.use_perm:
                loss = loss + perm_loss + atten_perm_loss
            if opt.use_inter_class:
                # small fixed weights for the auxiliary regularization terms
                loss = loss + 0.001*inter_class_loss + 0.1*ensemble_loss + 0.1*atten_ensemble_loss
            loss.backward()
            optim.step()
            train_loss.append(loss.item())
            train_acc.append(acc.item())
            train_perm_loss.append(perm_loss.item())
            train_perm_acc.append(perm_acc.item())
            train_inter_class_loss.append(inter_class_loss.item())
        # print(train_loss)
        # averages over the last opt.iterations batches (i.e. this epoch)
        avg_loss = np.mean(train_loss[-opt.iterations:])
        avg_perm_loss = np.mean(train_perm_loss[-opt.iterations:])
        avg_inter_class_loss = np.mean(train_inter_class_loss[-opt.iterations:])
        avg_acc = np.mean(train_acc[-opt.iterations:])
        avg_perm_acc = np.mean(train_perm_acc[-opt.iterations:])
        logger.info('Avg Train Loss: {}, Avg Perm Loss:{}, Avg InCl Loss:{}, Avg Train Acc: {}, Perm Acc:{}'.format(avg_loss, avg_perm_loss, avg_inter_class_loss, avg_acc, avg_perm_acc))
        lr_scheduler.step()
        if val_dataloader is None:
            continue
        model.eval()
        with torch.no_grad():
            eps = 1
            val_iter = iter(val_dataloader)
            for batch in tqdm(val_iter):
                x, y = batch
                s = x[:opt.classes_per_it_val*opt.num_support_val]
                x = x[opt.classes_per_it_val*opt.num_support_val:]
                x, y, s = x.cuda(), y.cuda(), s.cuda()
                s = s.repeat([len([int(gpu_id) for gpu_id in opt.gpu if gpu_id.isdigit()]),1,1,1])
                model_output, perm_output, atten_output, perm_atten_output, weight, perm_weight = model(x, s)
                loss, acc , perm_loss, perm_acc, inter_class_loss, ensemble_loss \
                    = en_loss_fn(model_output,perm_output, perm_weight, weight, y,\
                                class_per_it=opt.classes_per_it_val ,num_support = opt.num_support_val)
                val_loss.append(loss.item())
                val_acc.append(acc.item())
        # import pdb; pdb.set_trace()
        avg_loss = np.mean(val_loss[-len(val_iter):])
        avg_acc = np.mean(val_acc[-len(val_iter):])
        postfix = ' (Best)' if avg_acc >= best_acc else ' (Best: {})'.format(best_acc)
        logger.info('Avg Val Loss: {}, Avg Val Acc: {}{}'.format(avg_loss, avg_acc, postfix))
        if avg_acc >= best_acc:
            # save both the rolling best and an epoch-stamped snapshot
            torch.save(model.module.state_dict(), best_model_path)
            best_acc = avg_acc
            best_state = model.module.state_dict()
            torch.save(model.module.state_dict(), os.path.join(opt.experiment_root, 'best_model{}.pth'.format(epoch)))
        torch.save(model.module.state_dict(), last_model_path)
    return best_state, best_acc, train_loss, train_acc, val_loss, val_acc
def main(options, logger):
    '''
    Set up seed, data, CRF network, optimizer and scheduler, run training,
    and return the best validation accuracy reached.
    '''
    logger.info('Algorithm options %s' % options)
    if not os.path.exists(options.experiment_root):
        os.makedirs(options.experiment_root)
    if torch.cuda.is_available() and not options.cuda:
        logger.info("WARNING: You have a CUDA device, so you should probably run with --cuda")
    init_seed(options)
    train_loader = init_dataloader(options, 'train')
    eval_loader = init_dataloader(options, 'val')
    model = init_crfnet(options)
    logger.info('Model Config')
    logger.info(model)
    if options.load:
        # resume from the previously saved best checkpoint
        logger.info('load old model')
        checkpoint = os.path.join(options.experiment_root, 'best_model.pth')
        model.load_state_dict(torch.load(checkpoint, map_location=default_device))
    # one DataParallel replica per digit in the --gpu string
    n_devices = sum(ch.isdigit() for ch in options.gpu)
    model = torch.nn.DataParallel(model, device_ids=range(n_devices)).cuda()
    optimizer = init_optim(options, model)
    scheduler = init_lr_scheduler(options, optimizer)
    _, best_acc, _, _, _, _ = train(opt=options,
                                    tr_dataloader=train_loader,
                                    val_dataloader=eval_loader,
                                    model=model,
                                    optim=optimizer,
                                    lr_scheduler=scheduler, logger=logger)
    del model
    return best_acc
def train_relation(options, logger):
    '''
    Set up seed, data, relation network, optimizer and scheduler, run
    training, and return the best validation accuracy reached.
    '''
    logger.info('Algorithm options %s' % options)
    if not os.path.exists(options.experiment_root):
        os.makedirs(options.experiment_root)
    if torch.cuda.is_available() and not options.cuda:
        logger.info("WARNING: You have a CUDA device, so you should probably run with --cuda")
    init_seed(options)
    train_loader = init_dataloader(options, 'train')
    eval_loader = init_dataloader(options, 'val')
    model = init_relationnet(options)
    logger.info('Model Config')
    logger.info(model)
    if options.load:
        # resume loosely: strict=False tolerates missing/extra keys
        logger.info('load old model')
        checkpoint = os.path.join(options.experiment_root, 'best_model.pth')
        model.load_state_dict(torch.load(checkpoint, map_location=default_device), strict=False)
    # one DataParallel replica per digit in the --gpu string
    n_devices = sum(ch.isdigit() for ch in options.gpu)
    model = torch.nn.DataParallel(model, device_ids=range(n_devices)).cuda()
    optimizer = init_optim(options, model)
    scheduler = init_lr_scheduler(options, optimizer)
    _, best_acc, _, _, _, _ = train(opt=options,
                                    tr_dataloader=train_loader,
                                    val_dataloader=eval_loader,
                                    model=model,
                                    optim=optimizer,
                                    lr_scheduler=scheduler, logger=logger)
    del model
    return best_acc
def train_proto(options, logger):
    '''
    Initialize everything and train a prototypical network.

    Each episode: embed the batch, average the support embeddings per class
    into prototypes, and classify the query embeddings against them with
    p_loss.  Checkpoints go to options.experiment_root ('best_model.pth' on
    validation improvement, 'last_model.pth' every epoch).  Returns None.
    '''
    logger.info('Algorithm options %s' % options)
    if not os.path.exists(options.experiment_root):
        os.makedirs(options.experiment_root)
    if torch.cuda.is_available() and not options.cuda:
        logger.info("WARNING: You have a CUDA device, so you should probably run with --cuda")
    init_seed(options)
    tr_dataloader = init_dataloader(options, 'train')
    val_dataloader = init_dataloader(options, 'val')
    model = init_protonet(options)
    logger.info('Model Config')
    logger.info(model)
    device = default_device if torch.cuda.is_available() and options.cuda else 'cpu'
    if options.load:
        logger.info('load old model')
        model_path = os.path.join(options.experiment_root, 'best_model.pth')
        model.load_state_dict(torch.load(model_path,map_location=default_device))
    # NOTE(review): unlike main()/train_relation(), no .cuda() is chained
    # here; the model is moved implicitly via x.to(device) inputs — confirm.
    model=torch.nn.DataParallel(model,device_ids=range(len([int(gpu_id) for gpu_id in options.gpu if gpu_id.isdigit()])))
    optim = init_optim(options, model)
    lr_scheduler = init_lr_scheduler(options, optim)
    train_loss = []
    train_acc = []
    val_loss = []
    val_acc = []
    best_acc = 0
    best_model_path = os.path.join(options.experiment_root, 'best_model.pth')
    last_model_path = os.path.join(options.experiment_root, 'last_model.pth')
    for epoch in range(options.epochs):
        logger.info('=== Epoch: {} ==='.format(epoch))
        if options.switch:
            tr_dataloader.dataset.switch_image_size()
            val_dataloader.dataset.switch_image_size()
        tr_iter = iter(tr_dataloader)
        model.train()
        for batch in tqdm(tr_iter):
            optim.zero_grad()
            x, y = batch
            x, y = x.to(device), y.to(device)
            model_output = model(x)
            # class prototypes: mean of the support embeddings per class
            ref_output = model_output[:options.classes_per_it_tr*options.num_support_tr].view(options.classes_per_it_tr,options.num_support_tr,-1).mean(dim = 1)
            query_output = model_output[options.classes_per_it_tr*options.num_support_tr:]
            loss, acc = p_loss(query_output, ref_output,y, class_per_it=options.classes_per_it_tr ,num_support = options.num_support_tr)
            loss.backward()
            optim.step()
            train_loss.append(loss.item())
            train_acc.append(acc.item())
        # epoch averages over the last options.iterations batches
        avg_loss = np.mean(train_loss[-options.iterations:])
        avg_acc = np.mean(train_acc[-options.iterations:])
        logger.info('Avg Train Loss: {}, Avg Train Acc: {}'.format(avg_loss, avg_acc))
        lr_scheduler.step()
        if val_dataloader is None:
            continue
        model.eval()
        with torch.no_grad():
            eps = 1
            val_iter = iter(val_dataloader)
            for batch in tqdm(val_iter):
                x, y = batch
                x, y = x.to(device),y.to(device)
                model_output = model(x)
                ref_output = model_output[:options.classes_per_it_val*options.num_support_val].view(options.classes_per_it_val,options.num_support_val,-1).mean(dim = 1)
                query_output = model_output[options.classes_per_it_val*options.num_support_val:]
                loss, acc = p_loss(query_output, ref_output, y, class_per_it=options.classes_per_it_val ,num_support = options.num_support_val)
                val_loss.append(loss.item())
                val_acc.append(acc.item())
        avg_loss = np.mean(val_loss[-len(val_iter):])
        avg_acc = np.mean(val_acc[-len(val_iter):])
        postfix = ' (Best)' if avg_acc >= best_acc else ' (Best: {})'.format(best_acc)
        logger.info('Avg Val Loss: {}, Avg Val Acc: {}{}'.format(avg_loss, avg_acc, postfix))
        if avg_acc >= best_acc:
            torch.save(model.module.state_dict(), best_model_path)
            best_acc = avg_acc
            best_state = model.module.state_dict()
        torch.save(model.module.state_dict(), last_model_path)
def train_relation_attention(options, logger):
    '''
    Initialize everything and train a relation network (attention variant,
    full_load disabled).

    Each episode splits the batch into a support set `s` and query set `x`,
    scores query-support relations with r_loss, and checkpoints to
    options.experiment_root.  Returns None.
    '''
    logger.info('Algorithm options %s' % options)
    if not os.path.exists(options.experiment_root):
        os.makedirs(options.experiment_root)
    if torch.cuda.is_available() and not options.cuda:
        logger.info("WARNING: You have a CUDA device, so you should probably run with --cuda")
    init_seed(options)
    tr_dataloader = init_dataloader(options, 'train')
    val_dataloader = init_dataloader(options, 'val')
    model = init_relationnet(options)
    # skip loading the full network weights (attention-only variant)
    model.full_load = False
    logger.info('Model Config')
    logger.info(model)
    device = default_device if torch.cuda.is_available() and options.cuda else 'cpu'
    if options.load:
        logger.info('load old model')
        model_path = os.path.join(options.experiment_root, 'best_model.pth')
        model.load_state_dict(torch.load(model_path,map_location=default_device))
    model=torch.nn.DataParallel(model,device_ids=range(len([int(gpu_id) for gpu_id in options.gpu if gpu_id.isdigit()])))
    optim = init_optim(options, model)
    lr_scheduler = init_lr_scheduler(options, optim)
    train_loss = []
    train_acc = []
    val_loss = []
    val_acc = []
    best_acc = 0
    best_model_path = os.path.join(options.experiment_root, 'best_model.pth')
    last_model_path = os.path.join(options.experiment_root, 'last_model.pth')
    for epoch in range(options.epochs):
        logger.info('=== Epoch: {} ==='.format(epoch))
        if options.switch:
            tr_dataloader.dataset.switch_image_size()
            val_dataloader.dataset.switch_image_size()
        tr_iter = iter(tr_dataloader)
        model.train()
        for batch in tqdm(tr_iter):
            optim.zero_grad()
            x, y = batch
            # support set: first classes_per_it_tr * num_support_tr samples
            s = x[:options.classes_per_it_tr*options.num_support_tr]
            # commit here to allow standard input combined into the query batch while training
            x = x[options.classes_per_it_tr*options.num_support_tr:]
            x, y, s = x, y.cuda(), s
            # replicate support set once per GPU for DataParallel scatter
            s = s.repeat([len([int(gpu_id) for gpu_id in options.gpu if gpu_id.isdigit()]),1,1,1])
            model_output = model(x, s)
            loss, acc = r_loss(model_output,y, class_per_it=options.classes_per_it_tr ,num_support = options.num_support_tr)
            loss.backward()
            optim.step()
            train_loss.append(loss.item())
            train_acc.append(acc.item())
        avg_loss = np.mean(train_loss[-options.iterations:])
        avg_acc = np.mean(train_acc[-options.iterations:])
        logger.info('Avg Train Loss: {}, Avg Train Acc: {}'.format(avg_loss, avg_acc))
        lr_scheduler.step()
        if val_dataloader is None:
            continue
        model.eval()
        with torch.no_grad():
            eps = 1
            val_iter = iter(val_dataloader)
            for batch in tqdm(val_iter):
                x, y = batch
                s = x[:options.classes_per_it_val*options.num_support_val]
                x = x[options.classes_per_it_val*options.num_support_val:]
                x, y, s = x.cuda(), y.cuda(), s.cuda()
                s = s.repeat([len([int(gpu_id) for gpu_id in options.gpu if gpu_id.isdigit()]),1,1,1])
                model_output = model(x, s)
                loss, acc = r_loss(model_output, y, class_per_it=options.classes_per_it_val ,num_support = options.num_support_val)
                val_loss.append(loss.item())
                val_acc.append(acc.item())
        avg_loss = np.mean(val_loss[-len(val_iter):])
        avg_acc = np.mean(val_acc[-len(val_iter):])
        postfix = ' (Best)' if avg_acc >= best_acc else ' (Best: {})'.format(best_acc)
        logger.info('Avg Val Loss: {}, Avg Val Acc: {}{}'.format(avg_loss, avg_acc, postfix))
        if avg_acc >= best_acc:
            torch.save(model.module.state_dict(), best_model_path)
            best_acc = avg_acc
            best_state = model.module.state_dict()
        torch.save(model.module.state_dict(), last_model_path)
if __name__ == '__main__':
    # Parse CLI options and route to the requested training variant.
    options = get_parser().parse_args()
    options.stage = 'train'
    # restrict visible devices before any CUDA context is created
    os.environ["CUDA_VISIBLE_DEVICES"]=options.gpu
    logger = init_log_file(options)
    if options.prototypical:
        train_proto(options, logger)
    elif options.relation:
        # train_relation_attention(options, logger)
        train_relation(options, logger)
    else:
        main(options, logger)
| 42.287154
| 186
| 0.634739
| 2,299
| 16,788
| 4.357112
| 0.077425
| 0.028951
| 0.025157
| 0.026555
| 0.865129
| 0.848158
| 0.840571
| 0.826695
| 0.825696
| 0.800639
| 0
| 0.003033
| 0.253812
| 16,788
| 397
| 187
| 42.287154
| 0.796599
| 0.02609
| 0
| 0.760518
| 0
| 0.003236
| 0.066011
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016181
| false
| 0
| 0.022654
| 0
| 0.048544
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
23773c6d89464f85244937b4f17f1dca4236e043
| 8,228
|
py
|
Python
|
amfe/parametric/morphing/implementer/ffdimplementer.py
|
ma-kast/AMfe
|
99686cc313fb8904a093fb42e6cf0b38f8cfd791
|
[
"BSD-3-Clause"
] | null | null | null |
amfe/parametric/morphing/implementer/ffdimplementer.py
|
ma-kast/AMfe
|
99686cc313fb8904a093fb42e6cf0b38f8cfd791
|
[
"BSD-3-Clause"
] | null | null | null |
amfe/parametric/morphing/implementer/ffdimplementer.py
|
ma-kast/AMfe
|
99686cc313fb8904a093fb42e6cf0b38f8cfd791
|
[
"BSD-3-Clause"
] | null | null | null |
#
# Copyright (c) 2018 TECHNICAL UNIVERSITY OF MUNICH, DEPARTMENT OF MECHANICAL ENGINEERING, CHAIR OF APPLIED MECHANICS,
# BOLTZMANNSTRASSE 15, 85748 GARCHING/MUNICH, GERMANY, RIXEN@TUM.DE.
#
# Distributed under 3-Clause BSD license. See LICENSE file for more information.
#
import numpy as np
from scipy.special import binom

from amfe.linalg.tools import coordinate_transform
from amfe.parametric.morphing.implementer import MorpherImplementer
class FfdMorpherImplementer(MorpherImplementer):
    '''
    Implements morphing with FFD (free-form deformation) technique in 3D.

    Mesh nodes are mapped into the unit cube of a local control frame,
    weighted with Bernstein polynomials (precomputed in offline()) and
    displaced by the control-point parameters mu_x/mu_y/mu_z in morph().
    '''
    def __init__(self, origin=np.array([[0],[0],[0]]), csys=np.eye(3), mu_shape=(3,3,3)):
        # origin: (3, 1) origin of the FFD bounding box in physical space
        # csys: (3, 3) local coordinate system of the bounding box
        # mu_shape: number of control points per parametric direction
        super().__init__()
        self._origin_box = np.array(origin).reshape((3, 1))
        self._csys = np.array(csys).reshape((3,3))
        # save transformations
        physical_frame = self._csys
        reference_frame = np.eye(3)
        (self._transformation, self._inverse_transformation) = coordinate_transform(reference_frame, physical_frame)
        (dim_n_mu, dim_m_mu, dim_t_mu) = mu_shape
        self._dim_n_mu = dim_n_mu
        self._dim_m_mu = dim_m_mu
        self._dim_t_mu = dim_t_mu
        # Bernstein polynomial tables, filled in offline()
        self._bernstein_x = None
        self._bernstein_y = None
        self._bernstein_z = None
        self._shift_mesh_points = None
        self._no_of_dim = 3
        self._no_of_mesh_points = 0
    @property
    def mu_shape(self):
        # control-point grid shape (n, m, t)
        return self._dim_n_mu, self._dim_m_mu, self._dim_t_mu
    def offline(self, nodes_reference):
        """Precompute the Bernstein polynomial values for every reference
        node; must be called before morph()."""
        # apply transformation to original mesh points
        reference_frame_mesh_points = self._transformation(nodes_reference.T - self._origin_box).T
        # TODO: Raise error if not in bounding box
        mesh_points = reference_frame_mesh_points
        (n_rows_mesh, n_cols_mesh) = mesh_points.shape
        self._no_of_mesh_points = n_rows_mesh
        self._no_of_dim = n_cols_mesh
        # Initialization. In order to exploit the contiguity in memory the
        # following are transposed
        self._bernstein_x = np.zeros((self._dim_n_mu, n_rows_mesh))
        self._bernstein_y = np.zeros((self._dim_m_mu, n_rows_mesh))
        self._bernstein_z = np.zeros((self._dim_t_mu, n_rows_mesh))
        # Bernstein basis: B_i(t) = C(n-1, i) * (1-t)^(n-1-i) * t^i
        for i in range(0, self._dim_n_mu):
            aux1 = np.power((1 - mesh_points[:, 0]), self._dim_n_mu - 1 - i)
            aux2 = np.power(mesh_points[:, 0], i)
            self._bernstein_x[i, :] = binom(self._dim_n_mu - 1, i) * np.multiply(
                aux1, aux2)
        for i in range(0, self._dim_m_mu):
            aux1 = np.power((1 - mesh_points[:, 1]), self._dim_m_mu - 1 - i)
            aux2 = np.power(mesh_points[:, 1], i)
            self._bernstein_y[i, :] = binom(self._dim_m_mu - 1, i) * np.multiply(
                aux1, aux2)
        for i in range(0, self._dim_t_mu):
            aux1 = np.power((1 - mesh_points[:, 2]), self._dim_t_mu - 1 - i)
            aux2 = np.power(mesh_points[:, 2], i)
            self._bernstein_z[i, :] = binom(self._dim_t_mu - 1, i) * np.multiply(
                aux1, aux2)
    def morph(self, nodes_reference, mu_x, mu_y, mu_z):
        """Return the morphed node coordinates for control-point
        displacements mu_x/mu_y/mu_z (each of shape mu_shape)."""
        shifted_mesh_points = np.zeros((self._no_of_dim, self._no_of_mesh_points))
        aux_x = 0.
        aux_y = 0.
        aux_z = 0.
        # tensor-product sum of Bernstein weights times control displacements
        for j in range(0, self._dim_m_mu):
            for k in range(0, self._dim_t_mu):
                bernstein_yz = np.multiply(self._bernstein_y[j, :], self._bernstein_z[k, :])
                for i in range(0, self._dim_n_mu):
                    aux = np.multiply(self._bernstein_x[i, :], bernstein_yz)
                    aux_x += aux * mu_x[i, j, k]
                    aux_y += aux * mu_y[i, j, k]
                    aux_z += aux * mu_z[i, j, k]
        shifted_mesh_points[0, :] += aux_x
        shifted_mesh_points[1, :] += aux_y
        shifted_mesh_points[2, :] += aux_z
        # shift_mesh_points needs to be transposed to be summed with mesh_points
        # apply inverse transformation to shifted mesh points
        new_mesh_points = self._inverse_transformation(shifted_mesh_points).T + nodes_reference
        return new_mesh_points
class FfdMorpherImplementer2D(MorpherImplementer):
    '''
    Implements morphing with FFD (free-form deformation) technique in 2D.

    Mesh nodes are mapped into the unit square of a local control frame,
    weighted with Bernstein polynomials (precomputed in offline()) and
    displaced by the control-point parameters mu_x/mu_y in morph().
    '''
    def __init__(self, origin=np.array([[0], [0]]), csys=np.eye(2), mu_shape=(3, 3)):
        # origin: (2, 1) origin of the FFD bounding box in physical space
        #   BUGFIX: default was np.eye(2) (4 entries), which cannot be
        #   reshaped to (2, 1); use a zero origin like the 3D implementer.
        # csys: (2, 2) local coordinate system of the bounding box
        # mu_shape: number of control points per parametric direction
        super().__init__()
        self._origin_box = np.array(origin).reshape((2, 1))
        self._csys = np.array(csys).reshape((2, 2))
        # save transformations
        physical_frame = self._csys
        # BUGFIX: was sp.eye(2) — 'sp' is undefined in this module
        reference_frame = np.eye(2)
        (self._transformation, self._inverse_transformation) = coordinate_transform(reference_frame, physical_frame)
        (dim_n_mu, dim_m_mu) = mu_shape
        self._dim_n_mu = dim_n_mu
        self._dim_m_mu = dim_m_mu
        # Bernstein polynomial tables, filled in offline()
        self._bernstein_x = None
        self._bernstein_y = None
        self._shift_mesh_points = None
        self._no_of_dim = 2
        self._no_of_mesh_points = 0
    @property
    def mu_shape(self):
        # control-point grid shape (n, m)
        return self._dim_n_mu, self._dim_m_mu
    def offline(self, nodes_reference):
        """Precompute the Bernstein polynomial values for every reference
        node; must be called before morph()."""
        # apply transformation to original mesh points
        reference_frame_mesh_points = self._transformation(nodes_reference.T - self._origin_box).T
        # TODO: Raise error if not in bounding box
        mesh_points = reference_frame_mesh_points
        (n_rows_mesh, n_cols_mesh) = mesh_points.shape
        self._no_of_mesh_points = n_rows_mesh
        self._no_of_dim = n_cols_mesh
        # Initialization. In order to exploit the contiguity in memory the
        # following are transposed
        self._bernstein_x = np.zeros((self._dim_n_mu, n_rows_mesh))
        self._bernstein_y = np.zeros((self._dim_m_mu, n_rows_mesh))
        # Bernstein basis: B_i(t) = C(n-1, i) * (1-t)^(n-1-i) * t^i
        for i in range(0, self._dim_n_mu):
            aux1 = np.power((1 - mesh_points[:, 0]), self._dim_n_mu - 1 - i)
            aux2 = np.power(mesh_points[:, 0], i)
            self._bernstein_x[i, :] = binom(self._dim_n_mu - 1, i) * np.multiply(
                aux1, aux2)
        for i in range(0, self._dim_m_mu):
            aux1 = np.power((1 - mesh_points[:, 1]), self._dim_m_mu - 1 - i)
            aux2 = np.power(mesh_points[:, 1], i)
            self._bernstein_y[i, :] = binom(self._dim_m_mu - 1, i) * np.multiply(
                aux1, aux2)
    def morph(self, nodes_reference, mu_x, mu_y):
        """Return the morphed node coordinates for control-point
        displacements mu_x/mu_y (each of shape mu_shape)."""
        shifted_mesh_points = np.zeros((self._no_of_dim, self._no_of_mesh_points))
        aux_x = 0.
        aux_y = 0.
        # tensor-product sum of Bernstein weights times control displacements
        for j in range(0, self._dim_m_mu):
            for i in range(0, self._dim_n_mu):
                aux = np.multiply(self._bernstein_x[i, :], self._bernstein_y[j, :])
                aux_x += aux * mu_x[i, j]
                aux_y += aux * mu_y[i, j]
        shifted_mesh_points[0, :] += aux_x
        shifted_mesh_points[1, :] += aux_y
        # shift_mesh_points needs to be transposed to be summed with mesh_points
        # apply inverse transformation to shifted mesh points
        new_mesh_points = self._inverse_transformation(shifted_mesh_points).T + nodes_reference
        return new_mesh_points
| 42.194872
| 118
| 0.610233
| 1,154
| 8,228
| 3.97227
| 0.12825
| 0.146161
| 0.02356
| 0.089005
| 0.80781
| 0.80781
| 0.801483
| 0.766798
| 0.737784
| 0.737784
| 0
| 0.021449
| 0.280384
| 8,228
| 194
| 119
| 42.412371
| 0.752744
| 0.253038
| 0
| 0.663793
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010309
| 0
| 1
| 0.068966
| false
| 0
| 0.025862
| 0.017241
| 0.146552
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cc6d2b10850c4ba76866389abc5bae8103a3803f
| 24,712
|
py
|
Python
|
tests/entanglement_management/test_purification.py
|
aliro-technologies/SeQUeNCe
|
7bd22af52e04821b407b1712d853920f4f8dd609
|
[
"BSD-3-Clause"
] | null | null | null |
tests/entanglement_management/test_purification.py
|
aliro-technologies/SeQUeNCe
|
7bd22af52e04821b407b1712d853920f4f8dd609
|
[
"BSD-3-Clause"
] | null | null | null |
tests/entanglement_management/test_purification.py
|
aliro-technologies/SeQUeNCe
|
7bd22af52e04821b407b1712d853920f4f8dd609
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy
import pytest
from sequence.components.memory import Memory
from sequence.components.optical_channel import ClassicalChannel
from sequence.kernel.timeline import Timeline
from sequence.entanglement_management.purification import *
from sequence.topology.node import Node
# Fix the global numpy RNG so test outcomes are reproducible.
numpy.random.seed(0)
class FakeResourceManager():
    """Minimal resource-manager stub that records every state update.

    Each call to ``update`` appends a ``(memory, state)`` tuple to ``log``
    so tests can inspect the sequence of transitions afterwards.
    """

    def __init__(self, owner):
        # the owning node is accepted for interface compatibility but unused
        self.log = []

    def update(self, protocol, memory, state):
        """Record the transition; reset the memory when it drops to RAW."""
        self.log.append((memory, state))
        if state == "RAW":
            memory.reset()
class FakeNode(Node):
    """Node stub that logs every classical message it receives and then
    dispatches it to the protocol addressed as the receiver."""

    def __init__(self, name, tl, **kwargs):
        Node.__init__(self, name, tl)
        self.msg_log = []
        self.resource_manager = FakeResourceManager(self)

    def receive_message(self, src: str, msg: "Message"):
        """Log (time, src, msg) and forward to the matching protocol."""
        self.msg_log.append((self.timeline.now(), src, msg))
        for proto in self.protocols:
            if proto.name == msg.receiver:
                proto.received_message(src, msg)
def test_BBPSSWMessage():
    """BBPSSWMessage stores its fields and rejects unknown message types."""
    msg = BBPSSWMessage(BBPSSWMsgType.PURIFICATION_RES, "another", meas_res=0)
    assert msg.receiver == "another"
    assert msg.msg_type == BBPSSWMsgType.PURIFICATION_RES
    assert msg.meas_res == 0
    # constructing with a bogus type must raise
    with pytest.raises(Exception):
        BBPSSWMessage("unknown type")
# The four two-qubit Bell states as ket vectors in the computational basis
# (|00>, |01>, |10>, |11>), each nonzero amplitude being 1/sqrt(2).
phi_plus = [0.5 ** 0.5, 0, 0, 0.5 ** 0.5]
phi_minus = [0.5 ** 0.5, 0, 0, -(0.5 ** 0.5)]
psi_plus = [0, 0.5 ** 0.5, 0.5 ** 0.5, 0]
psi_minus = [0, 0.5 ** 0.5, -(0.5 ** 0.5), 0]
def create_scenario(state1, state2, seed):
    """Build and run one two-node BBPSSW purification episode.

    Two FakeNodes are linked by classical channels; the kept memory pair is
    initialized to *state1* and the measured pair to *state2*.  After the
    timeline runs, the measured memories must be disentangled.

    Returns (tl, kept1, kept2, meas1, meas2, ep1, ep2).
    """
    tl = Timeline()
    tl.seed(seed)
    a1 = FakeNode("a1", tl)
    a2 = FakeNode("a2", tl)
    # classical channels in both directions with a fixed 1e9 ps delay
    cc0 = ClassicalChannel("cc0", tl, 0, 1e5)
    cc1 = ClassicalChannel("cc1", tl, 0, 1e5)
    cc0.delay = 1e9
    cc1.delay = 1e9
    cc0.set_ends(a1, a2)
    cc1.set_ends(a2, a1)
    # ideal memories (fidelity/efficiency 1) so outcomes depend only on states
    kept1 = Memory('kept1', tl, fidelity=1, frequency=0, efficiency=1, coherence_time=1, wavelength=500)
    kept2 = Memory('kept2', tl, fidelity=1, frequency=0, efficiency=1, coherence_time=1, wavelength=500)
    meas1 = Memory('mea1', tl, fidelity=1, frequency=0, efficiency=1, coherence_time=1, wavelength=500)
    meas2 = Memory('mea2', tl, fidelity=1, frequency=0, efficiency=1, coherence_time=1, wavelength=500)
    tl.init()
    tl.quantum_manager.set([kept1.qstate_key, kept2.qstate_key], state1)
    tl.quantum_manager.set([meas1.qstate_key, meas2.qstate_key], state2)
    kept1.entangled_memory = {'node_id': 'a2', 'memo_id': 'kept2'}
    kept2.entangled_memory = {'node_id': 'a1', 'memo_id': 'kept1'}
    meas1.entangled_memory = {'node_id': 'a2', 'memo_id': 'meas2'}
    meas2.entangled_memory = {'node_id': 'a1', 'memo_id': 'meas1'}
    kept1.fidelity = kept2.fidelity = meas1.fidelity = meas2.fidelity = 1
    # one BBPSSW protocol instance per node, keeping kept*, consuming meas*
    ep1 = BBPSSW(a1, "a1.ep1", kept1, meas1)
    ep2 = BBPSSW(a2, "a2.ep2", kept2, meas2)
    a1.protocols.append(ep1)
    a2.protocols.append(ep2)
    ep1.set_others(ep2)
    ep2.set_others(ep1)
    ep1.start()
    ep2.start()
    tl.run()
    # the measured pair is always consumed by the protocol
    assert meas1.entangled_memory == meas2.entangled_memory == {'node_id': None, 'memo_id': None}
    return tl, kept1, kept2, meas1, meas2, ep1, ep2
def complex_array_equal(arr1, arr2, precision=5):
    """Return True when arr1 and arr2 agree elementwise within 10**-precision.

    Note: sequences of different lengths are compared only over the common
    prefix (zip semantics), matching the original behavior.
    """
    # BUGFIX: was `1 ** -precision`, which is always 1 and made the
    # comparison accept differences of up to (almost) a whole unit.
    tol = 10 ** -precision
    for c1, c2 in zip(arr1, arr2):
        if abs(c1 - c2) >= tol:
            return False
    return True
def correct_order(state, keys):
    """Return *state* in canonical qubit ordering.

    When the two qubit keys are stored in descending order, the |01> and
    |10> amplitudes are exchanged (qubit-swap permutation); otherwise the
    state is returned unchanged.

    BUGFIX: the original implicitly returned None for already-ordered keys,
    which made callers crash in complex_array_equal.
    """
    if keys[0] > keys[1]:
        # swap the middle basis amplitudes
        return numpy.array([[1, 0, 0, 0], [0, 0, 1, 0], [0, 1, 0, 0], [0, 0, 0, 1]]) @ state
    return state
# 1/sqrt(2) as a complex amplitude, used in expected post-purification states.
sqrt_2 = complex(0.5 ** 0.5)
def test_BBPSSW_phi_plus_phi_plus():
    """
    phi+ phi+
     0b0
    [0.5+0.j 0. +0.j 0. +0.j 0.5+0.j]
     0b1
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
     0b10
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
     0b11
    [0.5+0.j 0. +0.j 0. +0.j 0.5+0.j]
    """
    zero_outcomes = 0
    for seed in range(100):
        tl, kept1, kept2, meas1, meas2, ep1, ep2 = create_scenario(phi_plus, phi_plus, seed)
        # kept memories must remain mutually entangled
        assert kept1.entangled_memory == {'node_id': 'a2', 'memo_id': 'kept2'}
        assert kept2.entangled_memory == {'node_id': 'a1', 'memo_id': 'kept1'}
        # both sides must agree on the measurement outcome
        assert ep1.meas_res == ep2.meas_res
        if ep1.meas_res == 0:
            zero_outcomes += 1
        # kept1 and kept2 must share one Ketstate object
        ket1 = tl.quantum_manager.get(kept1.qstate_key)
        ket2 = tl.quantum_manager.get(kept2.qstate_key)
        assert ket1 is ket2
        assert kept1.qstate_key in ket1.keys and kept2.qstate_key in ket1.keys
        # and that state must still be phi+
        state = correct_order(ket1.state, ket1.keys)
        assert complex_array_equal(phi_plus, state)
    # outcome 0 should occur in roughly half of the 100 trials
    assert abs(zero_outcomes - 50) < 10
def test_BBPSSW_phi_plus_phi_minus():
    """BBPSSW with kept=phi+, meas=phi-: succeeds; kept pair ends in (+/-) phi-.

    phi+ phi-
     0b0
    [ 0.5+0.j  0. +0.j  0. +0.j -0.5+0.j]
     0b1
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
     0b10
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
     0b11
    [-0.5+0.j  0. +0.j  0. +0.j  0.5+0.j]
    """
    counter = 0
    for i in range(100):
        tl, kept1, kept2, meas1, meas2, ep1, ep2 = create_scenario(phi_plus, phi_minus, i)
        assert kept1.entangled_memory == {'node_id': 'a2', 'memo_id': 'kept2'}
        assert kept2.entangled_memory == {'node_id': 'a1', 'memo_id': 'kept1'}
        assert ep1.meas_res == ep2.meas_res
        ket1 = tl.quantum_manager.get(kept1.qstate_key)
        ket2 = tl.quantum_manager.get(kept2.qstate_key)
        assert id(ket1) == id(ket2)
        assert kept1.qstate_key in ket1.keys and kept2.qstate_key in ket1.keys
        state = correct_order(ket1.state, ket1.keys)
        if ep1.meas_res == 0:
            counter += 1
            assert complex_array_equal(phi_minus, state)
        else:
            # global phase differs between the two measurement branches
            assert complex_array_equal([-sqrt_2, 0, 0, sqrt_2], state)
    assert abs(counter - 50) < 10
def test_BBPSSW_phi_minus_phi_plus():
    """BBPSSW with kept=phi-, meas=phi+: succeeds; kept pair stays phi-.

    phi- phi+
     0b0
    [ 0.5+0.j  0. +0.j  0. +0.j -0.5+0.j]
     0b1
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
     0b10
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
     0b11
    [ 0.5+0.j  0. +0.j  0. +0.j -0.5+0.j]
    """
    counter = 0
    for i in range(100):
        tl, kept1, kept2, meas1, meas2, ep1, ep2 = create_scenario(phi_minus, phi_plus, i)
        assert kept1.entangled_memory == {'node_id': 'a2', 'memo_id': 'kept2'}
        assert kept2.entangled_memory == {'node_id': 'a1', 'memo_id': 'kept1'}
        assert ep1.meas_res == ep2.meas_res
        ket1 = tl.quantum_manager.get(kept1.qstate_key)
        ket2 = tl.quantum_manager.get(kept2.qstate_key)
        assert id(ket1) == id(ket2)
        assert kept1.qstate_key in ket1.keys and kept2.qstate_key in ket1.keys
        state = correct_order(ket1.state, ket1.keys)
        assert complex_array_equal(phi_minus, state)
        if ep1.meas_res == 0:
            counter += 1
        else:
            # both outcomes leave the same kept state (see docstring); nothing extra to check
            pass
    assert abs(counter - 50) < 10
def test_BBPSSW_phi_minus_phi_minus():
    """BBPSSW with both pairs in phi-: succeeds; kept pair ends in (+/-) phi+.

    phi- phi-
     0b0
    [0.5+0.j 0. +0.j 0. +0.j 0.5+0.j]
     0b1
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
     0b10
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
     0b11
    [-0.5+0.j  0. +0.j  0. +0.j -0.5+0.j]
    """
    counter = 0
    for i in range(100):
        tl, kept1, kept2, meas1, meas2, ep1, ep2 = create_scenario(phi_minus, phi_minus, i)
        assert kept1.entangled_memory == {'node_id': 'a2', 'memo_id': 'kept2'}
        assert kept2.entangled_memory == {'node_id': 'a1', 'memo_id': 'kept1'}
        assert ep1.meas_res == ep2.meas_res
        ket1 = tl.quantum_manager.get(kept1.qstate_key)
        ket2 = tl.quantum_manager.get(kept2.qstate_key)
        assert id(ket1) == id(ket2)
        assert kept1.qstate_key in ket1.keys and kept2.qstate_key in ket1.keys
        state = correct_order(ket1.state, ket1.keys)
        if ep1.meas_res == 0:
            counter += 1
            assert complex_array_equal(phi_plus, state)
        else:
            # same state up to a global -1 phase
            assert complex_array_equal([-sqrt_2, 0, 0, -sqrt_2], state)
    assert abs(counter - 50) < 10
def test_BBPSSW_phi_plus_psi_plus():
    """BBPSSW with mismatched pairs (phi+ / psi+): purification always fails.

    phi+ psi+
     0b0
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
     0b1
    [0.5+0.j 0. +0.j 0. +0.j 0.5+0.j]
     0b10
    [0.5+0.j 0. +0.j 0. +0.j 0.5+0.j]
     0b11
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
    """
    counter = 0
    for i in range(100):
        tl, kept1, kept2, meas1, meas2, ep1, ep2 = create_scenario(phi_plus, psi_plus, i)
        # measurement results always disagree, so kept memories are reset to RAW
        assert kept1.entangled_memory == kept2.entangled_memory == {'node_id': None, 'memo_id': None}
        assert ep1.meas_res != ep2.meas_res
        ket1 = tl.quantum_manager.get(kept1.qstate_key)
        ket2 = tl.quantum_manager.get(kept2.qstate_key)
        assert id(ket1) != id(ket2)
        assert len(ket1.keys) == len(ket2.keys) == 1
        if ep1.meas_res == 0:
            counter += 1
    assert abs(counter - 50) < 10
def test_BBPSSW_phi_plus_psi_minus():
    """BBPSSW with mismatched pairs (phi+ / psi-): purification always fails.

    phi+ psi-
     0b0
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
     0b1
    [ 0.5+0.j  0. +0.j  0. +0.j -0.5+0.j]
     0b10
    [-0.5+0.j  0. +0.j  0. +0.j  0.5+0.j]
     0b11
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
    """
    counter = 0
    for i in range(100):
        tl, kept1, kept2, meas1, meas2, ep1, ep2 = create_scenario(phi_plus, psi_minus, i)
        # measurement results always disagree, so kept memories are reset to RAW
        assert kept1.entangled_memory == kept2.entangled_memory == {'node_id': None, 'memo_id': None}
        assert ep1.meas_res != ep2.meas_res
        ket1 = tl.quantum_manager.get(kept1.qstate_key)
        ket2 = tl.quantum_manager.get(kept2.qstate_key)
        assert id(ket1) != id(ket2)
        assert len(ket1.keys) == len(ket2.keys) == 1
        if ep1.meas_res == 0:
            counter += 1
    assert abs(counter - 50) < 10
def test_BBPSSW_phi_minus_psi_plus():
    """BBPSSW with mismatched pairs (phi- / psi+): purification always fails.

    phi- psi+
     0b0
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
     0b1
    [ 0.5+0.j  0. +0.j  0. +0.j -0.5+0.j]
     0b10
    [ 0.5+0.j  0. +0.j  0. +0.j -0.5+0.j]
     0b11
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
    """
    counter = 0
    for i in range(100):
        tl, kept1, kept2, meas1, meas2, ep1, ep2 = create_scenario(phi_minus, psi_plus, i)
        # measurement results always disagree, so kept memories are reset to RAW
        assert kept1.entangled_memory == kept2.entangled_memory == {'node_id': None, 'memo_id': None}
        assert ep1.meas_res != ep2.meas_res
        ket1 = tl.quantum_manager.get(kept1.qstate_key)
        ket2 = tl.quantum_manager.get(kept2.qstate_key)
        assert id(ket1) != id(ket2)
        assert len(ket1.keys) == len(ket2.keys) == 1
        if ep1.meas_res == 0:
            counter += 1
    assert abs(counter - 50) < 10
def test_BBPSSW_phi_minus_psi_minus():
    """BBPSSW with mismatched pairs (phi- / psi-): purification always fails.

    phi- psi-
     0b0
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
     0b1
    [0.5+0.j 0. +0.j 0. +0.j 0.5+0.j]
     0b10
    [-0.5+0.j  0. +0.j  0. +0.j -0.5+0.j]
     0b11
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
    """
    counter = 0
    for i in range(100):
        tl, kept1, kept2, meas1, meas2, ep1, ep2 = create_scenario(phi_minus, psi_minus, i)
        # measurement results always disagree, so kept memories are reset to RAW
        assert kept1.entangled_memory == kept2.entangled_memory == {'node_id': None, 'memo_id': None}
        assert ep1.meas_res != ep2.meas_res
        ket1 = tl.quantum_manager.get(kept1.qstate_key)
        ket2 = tl.quantum_manager.get(kept2.qstate_key)
        assert id(ket1) != id(ket2)
        assert len(ket1.keys) == len(ket2.keys) == 1
        if ep1.meas_res == 0:
            counter += 1
    assert abs(counter - 50) < 10
def test_BBPSSW_psi_plus_phi_plus():
    """BBPSSW with mismatched pairs (psi+ / phi+): purification always fails.

    psi+ phi+
     0b0
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
     0b1
    [0. +0.j 0.5+0.j 0.5+0.j 0. +0.j]
     0b10
    [0. +0.j 0.5+0.j 0.5+0.j 0. +0.j]
     0b11
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
    """
    counter = 0
    for i in range(100):
        tl, kept1, kept2, meas1, meas2, ep1, ep2 = create_scenario(psi_plus, phi_plus, i)
        # measurement results always disagree, so kept memories are reset to RAW
        assert kept1.entangled_memory == kept2.entangled_memory == {'node_id': None, 'memo_id': None}
        assert ep1.meas_res != ep2.meas_res
        ket1 = tl.quantum_manager.get(kept1.qstate_key)
        ket2 = tl.quantum_manager.get(kept2.qstate_key)
        assert id(ket1) != id(ket2)
        assert len(ket1.keys) == len(ket2.keys) == 1
        if ep1.meas_res == 0:
            counter += 1
    assert abs(counter - 50) < 10
def test_BBPSSW_psi_plus_phi_minus():
    """BBPSSW with mismatched pairs (psi+ / phi-): purification always fails.

    psi+ phi-
     0b0
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
     0b1
    [ 0. +0.j -0.5+0.j  0.5+0.j  0. +0.j]
     0b10
    [ 0. +0.j  0.5+0.j -0.5+0.j  0. +0.j]
     0b11
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
    """
    counter = 0
    for i in range(100):
        tl, kept1, kept2, meas1, meas2, ep1, ep2 = create_scenario(psi_plus, phi_minus, i)
        # measurement results always disagree, so kept memories are reset to RAW
        assert kept1.entangled_memory == kept2.entangled_memory == {'node_id': None, 'memo_id': None}
        assert ep1.meas_res != ep2.meas_res
        ket1 = tl.quantum_manager.get(kept1.qstate_key)
        ket2 = tl.quantum_manager.get(kept2.qstate_key)
        assert id(ket1) != id(ket2)
        assert len(ket1.keys) == len(ket2.keys) == 1
        if ep1.meas_res == 0:
            counter += 1
    assert abs(counter - 50) < 10
def test_BBPSSW_psi_minus_phi_plus():
    """BBPSSW with mismatched pairs (psi- / phi+): purification always fails.

    psi- phi+
     0b0
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
     0b1
    [ 0. +0.j  0.5+0.j -0.5+0.j  0. +0.j]
     0b10
    [ 0. +0.j  0.5+0.j -0.5+0.j  0. +0.j]
     0b11
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
    """
    counter = 0
    for i in range(100):
        tl, kept1, kept2, meas1, meas2, ep1, ep2 = create_scenario(psi_minus, phi_plus, i)
        # measurement results always disagree, so kept memories are reset to RAW
        assert kept1.entangled_memory == kept2.entangled_memory == {'node_id': None, 'memo_id': None}
        assert ep1.meas_res != ep2.meas_res
        ket1 = tl.quantum_manager.get(kept1.qstate_key)
        ket2 = tl.quantum_manager.get(kept2.qstate_key)
        assert id(ket1) != id(ket2)
        assert len(ket1.keys) == len(ket2.keys) == 1
        if ep1.meas_res == 0:
            counter += 1
    assert abs(counter - 50) < 10
def test_BBPSSW_psi_minus_phi_minus():
    """BBPSSW with mismatched pairs (psi- / phi-): purification always fails.

    psi- phi-
     0b0
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
     0b1
    [ 0. +0.j -0.5+0.j -0.5+0.j  0. +0.j]
     0b10
    [0. +0.j 0.5+0.j 0.5+0.j 0. +0.j]
     0b11
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
    """
    counter = 0
    for i in range(100):
        tl, kept1, kept2, meas1, meas2, ep1, ep2 = create_scenario(psi_minus, phi_minus, i)
        # measurement results always disagree, so kept memories are reset to RAW
        assert kept1.entangled_memory == kept2.entangled_memory == {'node_id': None, 'memo_id': None}
        assert ep1.meas_res != ep2.meas_res
        ket1 = tl.quantum_manager.get(kept1.qstate_key)
        ket2 = tl.quantum_manager.get(kept2.qstate_key)
        assert id(ket1) != id(ket2)
        assert len(ket1.keys) == len(ket2.keys) == 1
        if ep1.meas_res == 0:
            counter += 1
    assert abs(counter - 50) < 10
def test_BBPSSW_psi_plus_psi_plus():
    """BBPSSW with both pairs in psi+: succeeds; kept pair stays psi+.

    psi+ psi+
     0b0
    [0. +0.j 0.5+0.j 0.5+0.j 0. +0.j]
     0b1
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
     0b10
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
     0b11
    [0. +0.j 0.5+0.j 0.5+0.j 0. +0.j]
    """
    counter = 0
    for i in range(100):
        tl, kept1, kept2, meas1, meas2, ep1, ep2 = create_scenario(psi_plus, psi_plus, i)
        assert kept1.entangled_memory == {'node_id': 'a2', 'memo_id': 'kept2'}
        assert kept2.entangled_memory == {'node_id': 'a1', 'memo_id': 'kept1'}
        assert ep1.meas_res == ep2.meas_res
        ket1 = tl.quantum_manager.get(kept1.qstate_key)
        ket2 = tl.quantum_manager.get(kept2.qstate_key)
        assert id(ket1) == id(ket2)
        assert kept1.qstate_key in ket1.keys and kept2.qstate_key in ket1.keys
        state = correct_order(ket1.state, ket1.keys)
        if ep1.meas_res == 0:
            counter += 1
            assert complex_array_equal(psi_plus, state)
        else:
            # both outcomes leave the same kept state (see docstring)
            assert complex_array_equal(psi_plus, state)
    assert abs(counter - 50) < 10
def test_BBPSSW_psi_plus_psi_minus():
    """BBPSSW with kept=psi+, meas=psi-: succeeds; kept pair ends in (+/-) psi-.

    psi+ psi-
     0b0
    [ 0. +0.j  0.5+0.j -0.5+0.j  0. +0.j]
     0b1
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
     0b10
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
     0b11
    [ 0. +0.j -0.5+0.j  0.5+0.j  0. +0.j]
    """
    counter = 0
    for i in range(100):
        tl, kept1, kept2, meas1, meas2, ep1, ep2 = create_scenario(psi_plus, psi_minus, i)
        assert kept1.entangled_memory == {'node_id': 'a2', 'memo_id': 'kept2'}
        assert kept2.entangled_memory == {'node_id': 'a1', 'memo_id': 'kept1'}
        assert ep1.meas_res == ep2.meas_res
        ket1 = tl.quantum_manager.get(kept1.qstate_key)
        ket2 = tl.quantum_manager.get(kept2.qstate_key)
        assert id(ket1) == id(ket2)
        assert kept1.qstate_key in ket1.keys and kept2.qstate_key in ket1.keys
        state = correct_order(ket1.state, ket1.keys)
        if ep1.meas_res == 0:
            counter += 1
            assert complex_array_equal(psi_minus, state)
        else:
            # same state up to a global -1 phase
            assert complex_array_equal([0, -sqrt_2, sqrt_2, 0], state)
    assert abs(counter - 50) < 10
def test_BBPSSW_psi_minus_psi_plus():
    """BBPSSW with kept=psi-, meas=psi+: succeeds; kept pair stays psi-.

    psi- psi+
     0b0
    [ 0. +0.j  0.5+0.j -0.5+0.j  0. +0.j]
     0b1
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
     0b10
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
     0b11
    [ 0. +0.j  0.5+0.j -0.5+0.j  0. +0.j]
    """
    counter = 0
    for i in range(100):
        tl, kept1, kept2, meas1, meas2, ep1, ep2 = create_scenario(psi_minus, psi_plus, i)
        assert kept1.entangled_memory == {'node_id': 'a2', 'memo_id': 'kept2'}
        assert kept2.entangled_memory == {'node_id': 'a1', 'memo_id': 'kept1'}
        assert ep1.meas_res == ep2.meas_res
        ket1 = tl.quantum_manager.get(kept1.qstate_key)
        ket2 = tl.quantum_manager.get(kept2.qstate_key)
        assert id(ket1) == id(ket2)
        assert kept1.qstate_key in ket1.keys and kept2.qstate_key in ket1.keys
        state = correct_order(ket1.state, ket1.keys)
        assert complex_array_equal(psi_minus, state)
        if ep1.meas_res == 0:
            counter += 1
        else:
            # assert quantum state
            pass
    assert abs(counter - 50) < 10
def test_BBPSSW_psi_minus_psi_minus():
    """BBPSSW with both pairs in psi-: succeeds; kept pair ends in (+/-) psi+.

    psi- psi-
     0b0
    [0. +0.j 0.5+0.j 0.5+0.j 0. +0.j]
     0b1
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
     0b10
    [0.+0.j 0.+0.j 0.+0.j 0.+0.j]
     0b11
    [ 0. +0.j -0.5+0.j -0.5+0.j  0. +0.j]
    """
    counter = 0
    for i in range(100):
        tl, kept1, kept2, meas1, meas2, ep1, ep2 = create_scenario(psi_minus, psi_minus, i)
        assert kept1.entangled_memory == {'node_id': 'a2', 'memo_id': 'kept2'}
        assert kept2.entangled_memory == {'node_id': 'a1', 'memo_id': 'kept1'}
        assert ep1.meas_res == ep2.meas_res
        ket1 = tl.quantum_manager.get(kept1.qstate_key)
        ket2 = tl.quantum_manager.get(kept2.qstate_key)
        assert id(ket1) == id(ket2)
        assert kept1.qstate_key in ket1.keys and kept2.qstate_key in ket1.keys
        state = correct_order(ket1.state, ket1.keys)
        if ep1.meas_res == 0:
            counter += 1
            # Bug fix: the docstring shows the 0b0 outcome is [0, 0.5, 0.5, 0],
            # which normalizes to psi+, not phi+. The old `phi_plus` assertion
            # only passed because complex_array_equal's tolerance was broken.
            assert complex_array_equal(psi_plus, state)
        else:
            # same state up to a global -1 phase
            assert complex_array_equal([0, -sqrt_2, -sqrt_2, 0], state)
    assert abs(counter - 50) < 10
bell_states = [phi_plus, phi_minus, psi_plus, psi_minus]
def test_BBPSSW_fidelity():
    """Statistical check that BBPSSW updates memory fidelities as predicted.

    Samples input pairs from a Werner-like distribution parameterized by
    `fidelity`, runs one purification round, and checks the bookkeeping:
    measured memories are always expired; kept memories get
    BBPSSW.improved_fidelity(fidelity) on success and are reset on failure.
    """
    tl = Timeline()
    a1 = FakeNode("a1", tl)
    a2 = FakeNode("a2", tl)
    cc0 = ClassicalChannel("cc0", tl, 0, 1e5)
    cc1 = ClassicalChannel("cc1", tl, 0, 1e5)
    cc0.delay = 1e9
    cc1.delay = 1e9
    cc0.set_ends(a1, a2)
    cc1.set_ends(a2, a1)
    tl.init()
    for i in range(1000):
        fidelity = numpy.random.uniform(0.5, 1)
        kept_memo1 = Memory("a1.kept", tl, fidelity=fidelity, frequency=0, efficiency=1, coherence_time=1,
                            wavelength=500)
        kept_memo2 = Memory("a2.kept", tl, fidelity, 0, 1, 1, 500)
        meas_memo1 = Memory("a1.meas", tl, fidelity, 0, 1, 1, 500)
        meas_memo2 = Memory("a2.meas", tl, fidelity, 0, 1, 1, 500)
        kept_memo1.entangled_memory["node_id"] = "a2"
        kept_memo1.entangled_memory["memo_id"] = "a2.kept"
        kept_memo1.fidelity = fidelity
        kept_memo2.entangled_memory["node_id"] = "a1"
        kept_memo2.entangled_memory["memo_id"] = "a1.kept"
        kept_memo2.fidelity = fidelity
        meas_memo1.entangled_memory["node_id"] = "a2"
        meas_memo1.entangled_memory["memo_id"] = "a2.meas"
        meas_memo1.fidelity = fidelity
        meas_memo2.entangled_memory["node_id"] = "a1"
        meas_memo2.entangled_memory["memo_id"] = "a1.meas"
        meas_memo2.fidelity = fidelity
        # draw each pair's Bell state: phi+ with prob. `fidelity`, others uniform
        pair1 = numpy.random.choice([0, 1, 2, 3], 1,
                                    p=[fidelity, (1 - fidelity) / 3, (1 - fidelity) / 3, (1 - fidelity) / 3])
        pair2 = numpy.random.choice([0, 1, 2, 3], 1,
                                    p=[fidelity, (1 - fidelity) / 3, (1 - fidelity) / 3, (1 - fidelity) / 3])
        tl.quantum_manager.set([kept_memo1.qstate_key, kept_memo2.qstate_key], bell_states[pair1[0]])
        tl.quantum_manager.set([meas_memo1.qstate_key, meas_memo2.qstate_key], bell_states[pair2[0]])
        ep1 = BBPSSW(a1, "a1.ep1.%d" % i, kept_memo1, meas_memo1)
        ep2 = BBPSSW(a2, "a2.ep2.%d" % i, kept_memo2, meas_memo2)
        a1.protocols.append(ep1)
        a2.protocols.append(ep2)
        ep1.set_others(ep2)
        ep2.set_others(ep1)
        ep1.start()
        ep2.start()
        tl.run()
        # measured memories are always consumed and returned to RAW
        assert a1.resource_manager.log[-2] == (meas_memo1, "RAW")
        assert a2.resource_manager.log[-2] == (meas_memo2, "RAW")
        assert meas_memo1.fidelity == meas_memo2.fidelity == 0
        if ep1.meas_res == ep2.meas_res:
            # success: kept memories stay entangled with boosted fidelity
            assert kept_memo1.fidelity == kept_memo2.fidelity == BBPSSW.improved_fidelity(fidelity)
            assert kept_memo1.entangled_memory["node_id"] == "a2" and kept_memo2.entangled_memory["node_id"] == "a1"
            assert a1.resource_manager.log[-1] == (kept_memo1, "ENTANGLED")
            assert a2.resource_manager.log[-1] == (kept_memo2, "ENTANGLED")
        else:
            # failure: kept memories are also reset
            assert kept_memo1.fidelity == kept_memo2.fidelity == 0
            assert kept_memo1.entangled_memory["node_id"] == kept_memo2.entangled_memory["node_id"] == None
            assert a1.resource_manager.log[-1] == (kept_memo1, "RAW")
            assert a2.resource_manager.log[-1] == (kept_memo2, "RAW")
def test_BBPSSW_success_rate():
    """Empirical success rate of BBPSSW matches BBPSSW.success_probability.

    Runs 1000 purification rounds at fixed input fidelity 0.8 and compares the
    observed fraction of matching measurement results to the analytic value.
    """
    tl = Timeline()
    a1 = FakeNode("a1", tl)
    a2 = FakeNode("a2", tl)
    cc0 = ClassicalChannel("cc0", tl, 0, 1e5)
    cc1 = ClassicalChannel("cc1", tl, 0, 1e5)
    cc0.delay = 1e9
    cc1.delay = 1e9
    cc0.set_ends(a1, a2)
    cc1.set_ends(a2, a1)
    tl.init()
    counter1 = counter2 = 0
    fidelity = 0.8
    for i in range(1000):
        kept_memo1 = Memory("a1.kept", tl, fidelity=fidelity, frequency=0, efficiency=1, coherence_time=1,
                            wavelength=500)
        kept_memo2 = Memory("a2.kept", tl, fidelity, 0, 1, 1, 500)
        meas_memo1 = Memory("a1.meas", tl, fidelity, 0, 1, 1, 500)
        meas_memo2 = Memory("a2.meas", tl, fidelity, 0, 1, 1, 500)
        kept_memo1.entangled_memory["node_id"] = "a2"
        kept_memo1.entangled_memory["memo_id"] = "a2.kept"
        kept_memo1.fidelity = fidelity
        kept_memo2.entangled_memory["node_id"] = "a1"
        kept_memo2.entangled_memory["memo_id"] = "a1.kept"
        kept_memo2.fidelity = fidelity
        meas_memo1.entangled_memory["node_id"] = "a2"
        meas_memo1.entangled_memory["memo_id"] = "a2.meas"
        meas_memo1.fidelity = fidelity
        meas_memo2.entangled_memory["node_id"] = "a1"
        meas_memo2.entangled_memory["memo_id"] = "a1.meas"
        meas_memo2.fidelity = fidelity
        # draw each pair's Bell state: phi+ with prob. `fidelity`, others uniform
        pair1 = numpy.random.choice([0, 1, 2, 3], 1,
                                    p=[fidelity, (1 - fidelity) / 3, (1 - fidelity) / 3, (1 - fidelity) / 3])
        pair2 = numpy.random.choice([0, 1, 2, 3], 1,
                                    p=[fidelity, (1 - fidelity) / 3, (1 - fidelity) / 3, (1 - fidelity) / 3])
        tl.quantum_manager.set([kept_memo1.qstate_key, kept_memo2.qstate_key], bell_states[pair1[0]])
        tl.quantum_manager.set([meas_memo1.qstate_key, meas_memo2.qstate_key], bell_states[pair2[0]])
        ep1 = BBPSSW(a1, "a1.ep1.%d" % i, kept_memo1, meas_memo1)
        ep2 = BBPSSW(a2, "a2.ep2.%d" % i, kept_memo2, meas_memo2)
        a1.protocols.append(ep1)
        a2.protocols.append(ep2)
        ep1.set_others(ep2)
        ep2.set_others(ep1)
        ep1.start()
        ep2.start()
        # NOTE(review): meas_res is read before tl.run() — presumably start()
        # already performs the local measurement; confirm against BBPSSW.start.
        if ep1.meas_res == ep2.meas_res:
            counter1 += 1
        else:
            counter2 += 1
        tl.run()
    assert abs(counter1 / (counter1 + counter2) - BBPSSW.success_probability(fidelity)) < 0.1
| 33.439783
| 116
| 0.573041
| 3,888
| 24,712
| 3.48714
| 0.043724
| 0.037764
| 0.042484
| 0.042484
| 0.865246
| 0.85728
| 0.853592
| 0.82018
| 0.806977
| 0.800634
| 0
| 0.100702
| 0.27347
| 24,712
| 738
| 117
| 33.485095
| 0.65445
| 0.123665
| 0
| 0.728736
| 0
| 0
| 0.04689
| 0
| 0
| 0
| 0
| 0
| 0.268966
| 1
| 0.05977
| false
| 0.004598
| 0.016092
| 0
| 0.089655
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cc81df728f1513bcd9cb85d00a52d823adfe75de
| 4,192
|
py
|
Python
|
tests/test_environment.py
|
agajews/hltex
|
6d1529a7e134e4d852e7815b89e1893ab482f472
|
[
"MIT"
] | 6
|
2019-01-07T21:21:55.000Z
|
2019-08-16T09:53:14.000Z
|
tests/test_environment.py
|
agajews/hltex
|
6d1529a7e134e4d852e7815b89e1893ab482f472
|
[
"MIT"
] | null | null | null |
tests/test_environment.py
|
agajews/hltex
|
6d1529a7e134e4d852e7815b89e1893ab482f472
|
[
"MIT"
] | 1
|
2019-03-03T11:58:01.000Z
|
2019-03-03T11:58:01.000Z
|
from hltex.control import Environment
from hltex.state import State
from hltex.translator import parse_custom_environment
def test_parse():
    """A one-line environment body is passed through the custom translate function."""
    source = ": \\ Hey"
    state = State(source)

    def translate_fn(_state, body):
        return "\\begin{itemize}\\item %s\\end{itemize}" % body

    res = parse_custom_environment(state, Environment("test", translate_fn, ""), 0)
    print(repr(res))
    assert res == "\\begin{itemize}\\item \\ Hey\\end{itemize}"
    # the whole source is consumed
    assert state.pos == len(source)
def test_newline():
    """A trailing newline is left unconsumed after a one-line environment."""
    source = ": \\ Hey\n"
    state = State(source)

    def translate_fn(_state, body):
        return "\\begin{itemize}\\item %s\\end{itemize}" % body

    res = parse_custom_environment(state, Environment("test", translate_fn, ""), 0)
    print(repr(res))
    assert res == "\\begin{itemize}\\item \\ Hey\\end{itemize}"
    # parsing stops at (does not consume) the newline
    assert source[state.pos] == "\n"
def test_not_eof():
    """Content after the environment's line is not consumed."""
    source = ": \\ Hey\n123"
    state = State(source)

    def translate_fn(_state, body):
        return "\\begin{itemize}\\item %s\\end{itemize}" % body

    res = parse_custom_environment(state, Environment("test", translate_fn, ""), 0)
    print(repr(res))
    assert res == "\\begin{itemize}\\item \\ Hey\\end{itemize}"
    # parsing stops at the newline preceding the trailing "123"
    assert source[state.pos] == "\n"
def test_block():
    """An indented multi-line block is dedented and wrapped by the translator."""
    source = ":\n    Hey\n    Hey again\n123"
    state = State(source)

    def translate_fn(_state, body):
        return "\\begin{itemize}\\item %s\\end{itemize}" % body

    res = parse_custom_environment(state, Environment("test", translate_fn, ""), 0)
    print(repr(res))
    assert res == "\\begin{itemize}\\item \nHey\nHey again\n\\end{itemize}"
    # parsing stops before the unindented "123"
    assert source[state.pos] == "\n"
def test_block_eof():
    """An indented block ending at end-of-file consumes the whole source."""
    source = ":\n    Hey\n    Hey again"
    state = State(source)

    def translate_fn(_state, body):
        return "\\begin{itemize}\\item %s\\end{itemize}" % body

    res = parse_custom_environment(state, Environment("test", translate_fn, ""), 0)
    print(repr(res))
    assert res == "\\begin{itemize}\\item \nHey\nHey again\n\\end{itemize}"
    assert state.pos == len(source)
def test_parse_raw():
    """In raw mode, unbalanced braces in the body are passed through verbatim."""
    source = ": \\ H}ey"
    state = State(source)

    def translate_fn(_state, body):
        return "\\begin{itemize}\\item %s\\end{itemize}" % body

    res = parse_custom_environment(
        state, Environment("test", translate_fn, "", raw=True), 0
    )
    print(repr(res))
    assert res == "\\begin{itemize}\\item \\ H}ey\\end{itemize}"
    assert state.pos == len(source)
def test_newline_raw():
    """Raw mode with a trailing newline: newline stays unconsumed."""
    source = ": \\ H}ey\n"
    state = State(source)

    def translate_fn(_state, body):
        return "\\begin{itemize}\\item %s\\end{itemize}" % body

    res = parse_custom_environment(
        state, Environment("test", translate_fn, "", raw=True), 0
    )
    print(repr(res))
    assert res == "\\begin{itemize}\\item \\ H}ey\\end{itemize}"
    assert source[state.pos] == "\n"
def test_not_eof_raw():
    """Raw mode: content after the environment's line is not consumed."""
    source = ": \\ H}ey\n123"
    state = State(source)

    def translate_fn(_state, body):
        return "\\begin{itemize}\\item %s\\end{itemize}" % body

    res = parse_custom_environment(
        state, Environment("test", translate_fn, "", raw=True), 0
    )
    print(repr(res))
    assert res == "\\begin{itemize}\\item \\ H}ey\\end{itemize}"
    assert source[state.pos] == "\n"
def test_block_raw():
    """Raw mode: an indented block with unbalanced braces is wrapped verbatim."""
    source = ":\n    H}ey\n    H{ey again\n123"
    state = State(source)

    def translate_fn(_state, body):
        return "\\begin{itemize}\\item %s\\end{itemize}" % body

    res = parse_custom_environment(
        state, Environment("test", translate_fn, "", raw=True), 0
    )
    print(repr(res))
    assert res == "\\begin{itemize}\\item \nH}ey\nH{ey again\n\\end{itemize}"
    assert source[state.pos] == "\n"
def test_block_eof_raw():
    """Raw mode: an indented block at end-of-file consumes the whole source."""
    source = ":\n    H}ey\n    H{ey again"
    state = State(source)

    def translate_fn(_state, body):
        return "\\begin{itemize}\\item %s\\end{itemize}" % body

    res = parse_custom_environment(
        state, Environment("test", translate_fn, "", raw=True), 0
    )
    print(repr(res))
    assert res == "\\begin{itemize}\\item \nH}ey\nH{ey again\n\\end{itemize}"
    assert state.pos == len(source)
| 29.111111
| 83
| 0.619752
| 554
| 4,192
| 4.561372
| 0.075812
| 0.08706
| 0.126632
| 0.075188
| 0.926791
| 0.926791
| 0.91571
| 0.91571
| 0.91571
| 0.886031
| 0
| 0.006601
| 0.204914
| 4,192
| 143
| 84
| 29.314685
| 0.751575
| 0
| 0
| 0.728155
| 0
| 0.019417
| 0.260019
| 0.125
| 0
| 0
| 0
| 0
| 0.194175
| 1
| 0.194175
| false
| 0
| 0.029126
| 0.097087
| 0.320388
| 0.097087
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cc9e05cb9a5d0d520bd9c921f8a1f8b0b0de567e
| 8,155
|
py
|
Python
|
src/bpp/migrations/0084_strona_tom_nr_zeszytu.py
|
iplweb/django-bpp
|
85f183a99d8d5027ae4772efac1e4a9f21675849
|
[
"BSD-3-Clause"
] | 1
|
2017-04-27T19:50:02.000Z
|
2017-04-27T19:50:02.000Z
|
src/bpp/migrations/0084_strona_tom_nr_zeszytu.py
|
mpasternak/django-bpp
|
434338821d5ad1aaee598f6327151aba0af66f5e
|
[
"BSD-3-Clause"
] | 41
|
2019-11-07T00:07:02.000Z
|
2022-02-27T22:09:39.000Z
|
src/bpp/migrations/0084_strona_tom_nr_zeszytu.py
|
iplweb/bpp
|
f027415cc3faf1ca79082bf7bacd4be35b1a6fdf
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-07-17 07:23
from __future__ import unicode_literals
from django.db import migrations, models
from bpp.models.abstract import wez_zakres_stron, parse_informacje
def przerzuc_dane(apps, schema_editor):
    """Backfill the new 'strony', 'tom' and 'nr_zeszytu' fields from the
    free-text 'szczegoly' / 'informacje' fields for every publication record.

    Bug fixes vs. the original:
    - `changed` was never initialized, so the first record that took no branch
      raised NameError (or, worse, reused a stale value from a previous record
      and saved unmodified rows).
    - the 'tom' branch set a typo `chagned`, so volume-only updates were
      silently never saved.
    """
    for model in ['Wydawnictwo_Ciagle', 'Wydawnictwo_Zwarte',
                  'Patent', 'Praca_Doktorska', 'Praca_Habilitacyjna']:
        klass = apps.get_model("bpp", model)
        for elem in klass.objects.all():
            changed = False  # reset per record
            if hasattr(elem, 'strony'):
                if elem.szczegoly:
                    s = wez_zakres_stron(elem.szczegoly)
                    # only fill in when the field is still empty
                    if not elem.strony:
                        if s != elem.strony:
                            elem.strony = s
                            changed = True
            if elem.informacje is not None:
                tom = parse_informacje(elem.informacje, "tom")
                nr_zeszytu = parse_informacje(elem.informacje, "numer")
                if hasattr(elem, 'tom'):
                    if not elem.tom and tom:
                        if tom != elem.tom:
                            elem.tom = tom
                            changed = True  # was: chagned = True (typo)
                if hasattr(elem, 'nr_zeszytu'):
                    if not elem.nr_zeszytu and nr_zeszytu:
                        if nr_zeszytu != elem.nr_zeszytu:
                            elem.nr_zeszytu = nr_zeszytu
                            changed = True
            if changed:
                elem.save()
class Migration(migrations.Migration):
    # Adds PBN-export helper fields ('strony', 'tom', 'nr_zeszytu') to the
    # publication models, then backfills them from the legacy free-text fields
    # via przerzuc_dane (reverse is a no-op).

    dependencies = [
        ('bpp', '0083_afiliuje_zatrudniony'),
    ]

    operations = [
        migrations.AddField(
            model_name='patent',
            name='strony',
            field=models.CharField(blank=True, help_text='Jeżeli uzupełnione, to pole będzie eksportowane do \n danych PBN. Jeżeli puste, informacja ta będzie ekstrahowana z \n pola \'Szczegóły\' w chwili importu. Aby uniknąć sytuacji, gdy wskutek\n błędnego wprowadzenia tekstu do pola "Szczegóły" informacja ta nie \n będzie mogła być wyekstrahowana z tego pola, kliknij przycisk \n "Uzupełnij", aby spowodować uzupełnienie tego pola na podstawie\n pola "Szcegóły". \n ', max_length=50, null=True),
        ),
        migrations.AddField(
            model_name='patent',
            name='tom',
            field=models.CharField(blank=True, help_text='Jeżeli uzupełnione, to pole będzie eksportowane do \n danych PBN. Jeżeli puste, informacja ta będzie ekstrahowana z \n pola \'Informacje\'. Kliknięcie przycisku "Uzupełnij" powoduje\n również automatyczne wypełnienie tego pola, o ile do formularza\n zostały wprowadzone odpowiednie informacje. ', max_length=50, null=True),
        ),
        migrations.AddField(
            model_name='praca_doktorska',
            name='strony',
            field=models.CharField(blank=True, help_text='Jeżeli uzupełnione, to pole będzie eksportowane do \n danych PBN. Jeżeli puste, informacja ta będzie ekstrahowana z \n pola \'Szczegóły\' w chwili importu. Aby uniknąć sytuacji, gdy wskutek\n błędnego wprowadzenia tekstu do pola "Szczegóły" informacja ta nie \n będzie mogła być wyekstrahowana z tego pola, kliknij przycisk \n "Uzupełnij", aby spowodować uzupełnienie tego pola na podstawie\n pola "Szcegóły". \n ', max_length=50, null=True),
        ),
        migrations.AddField(
            model_name='praca_doktorska',
            name='tom',
            field=models.CharField(blank=True, help_text='Jeżeli uzupełnione, to pole będzie eksportowane do \n danych PBN. Jeżeli puste, informacja ta będzie ekstrahowana z \n pola \'Informacje\'. Kliknięcie przycisku "Uzupełnij" powoduje\n również automatyczne wypełnienie tego pola, o ile do formularza\n zostały wprowadzone odpowiednie informacje. ', max_length=50, null=True),
        ),
        migrations.AddField(
            model_name='praca_habilitacyjna',
            name='strony',
            field=models.CharField(blank=True, help_text='Jeżeli uzupełnione, to pole będzie eksportowane do \n danych PBN. Jeżeli puste, informacja ta będzie ekstrahowana z \n pola \'Szczegóły\' w chwili importu. Aby uniknąć sytuacji, gdy wskutek\n błędnego wprowadzenia tekstu do pola "Szczegóły" informacja ta nie \n będzie mogła być wyekstrahowana z tego pola, kliknij przycisk \n "Uzupełnij", aby spowodować uzupełnienie tego pola na podstawie\n pola "Szcegóły". \n ', max_length=50, null=True),
        ),
        migrations.AddField(
            model_name='praca_habilitacyjna',
            name='tom',
            field=models.CharField(blank=True, help_text='Jeżeli uzupełnione, to pole będzie eksportowane do \n danych PBN. Jeżeli puste, informacja ta będzie ekstrahowana z \n pola \'Informacje\'. Kliknięcie przycisku "Uzupełnij" powoduje\n również automatyczne wypełnienie tego pola, o ile do formularza\n zostały wprowadzone odpowiednie informacje. ', max_length=50, null=True),
        ),
        migrations.AddField(
            model_name='wydawnictwo_ciagle',
            name='nr_zeszytu',
            field=models.CharField(blank=True, help_text='Jeżeli uzupełnione, to pole będzie eksportowane do \n danych PBN. Jeżeli puste, informacja ta będzie ekstrahowana z \n pola \'Informacje\'. Kliknięcie przycisku "Uzupełnij" powoduje\n również automatyczne wypełnienie tego pola, o ile do formularza\n zostały wprowadzone odpowiednie informacje. ', max_length=50, null=True),
        ),
        migrations.AddField(
            model_name='wydawnictwo_ciagle',
            name='strony',
            field=models.CharField(blank=True, help_text='Jeżeli uzupełnione, to pole będzie eksportowane do \n danych PBN. Jeżeli puste, informacja ta będzie ekstrahowana z \n pola \'Szczegóły\' w chwili importu. Aby uniknąć sytuacji, gdy wskutek\n błędnego wprowadzenia tekstu do pola "Szczegóły" informacja ta nie \n będzie mogła być wyekstrahowana z tego pola, kliknij przycisk \n "Uzupełnij", aby spowodować uzupełnienie tego pola na podstawie\n pola "Szcegóły". \n ', max_length=50, null=True),
        ),
        migrations.AddField(
            model_name='wydawnictwo_ciagle',
            name='tom',
            field=models.CharField(blank=True, help_text='Jeżeli uzupełnione, to pole będzie eksportowane do \n danych PBN. Jeżeli puste, informacja ta będzie ekstrahowana z \n pola \'Informacje\'. Kliknięcie przycisku "Uzupełnij" powoduje\n również automatyczne wypełnienie tego pola, o ile do formularza\n zostały wprowadzone odpowiednie informacje. ', max_length=50, null=True),
        ),
        migrations.AddField(
            model_name='wydawnictwo_zwarte',
            name='strony',
            field=models.CharField(blank=True, help_text='Jeżeli uzupełnione, to pole będzie eksportowane do \n danych PBN. Jeżeli puste, informacja ta będzie ekstrahowana z \n pola \'Szczegóły\' w chwili importu. Aby uniknąć sytuacji, gdy wskutek\n błędnego wprowadzenia tekstu do pola "Szczegóły" informacja ta nie \n będzie mogła być wyekstrahowana z tego pola, kliknij przycisk \n "Uzupełnij", aby spowodować uzupełnienie tego pola na podstawie\n pola "Szcegóły". \n ', max_length=50, null=True),
        ),
        migrations.AddField(
            model_name='wydawnictwo_zwarte',
            name='tom',
            field=models.CharField(blank=True, help_text='Jeżeli uzupełnione, to pole będzie eksportowane do \n danych PBN. Jeżeli puste, informacja ta będzie ekstrahowana z \n pola \'Informacje\'. Kliknięcie przycisku "Uzupełnij" powoduje\n również automatyczne wypełnienie tego pola, o ile do formularza\n zostały wprowadzone odpowiednie informacje. ', max_length=50, null=True),
        ),
        migrations.RunPython(
            przerzuc_dane,
            migrations.RunPython.noop
        )
    ]
| 72.8125
| 549
| 0.638504
| 935
| 8,155
| 5.490909
| 0.149733
| 0.037398
| 0.049279
| 0.05785
| 0.837359
| 0.830541
| 0.822945
| 0.822945
| 0.822945
| 0.822945
| 0
| 0.007335
| 0.281177
| 8,155
| 111
| 550
| 73.468468
| 0.868475
| 0.008338
| 0
| 0.589474
| 1
| 0.231579
| 0.552078
| 0.003093
| 0
| 0
| 0
| 0
| 0
| 1
| 0.010526
| false
| 0
| 0.084211
| 0
| 0.126316
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ccd48338b046785c59739014a2d212481f526c76
| 43,321
|
py
|
Python
|
polynomials_on_simplices/polynomial/polynomials_unit_simplex_lagrange_basis_cache.py
|
FAndersson/polynomials_on_simplices
|
f015a4772c817bfa99b0d6b726667a38a174b064
|
[
"MIT"
] | 1
|
2021-03-17T11:41:21.000Z
|
2021-03-17T11:41:21.000Z
|
polynomials_on_simplices/polynomial/polynomials_unit_simplex_lagrange_basis_cache.py
|
FAndersson/polynomials_on_simplices
|
f015a4772c817bfa99b0d6b726667a38a174b064
|
[
"MIT"
] | null | null | null |
polynomials_on_simplices/polynomial/polynomials_unit_simplex_lagrange_basis_cache.py
|
FAndersson/polynomials_on_simplices
|
f015a4772c817bfa99b0d6b726667a38a174b064
|
[
"MIT"
] | null | null | null |
"""Cached evaluation of coefficients for Lagrange basis polynomials on the unit simplex.

This module holds a single precomputed lookup table,
:data:`lagrange_basis_coefficients_cache`, so that the coefficients of the
Lagrange basis polynomials do not need to be recomputed at runtime.
"""
# Table layout:
#   lagrange_basis_coefficients_cache[n - 1][r - 1][nu]
# where
#   n  -- dimension of the unit simplex (1 <= n <= 4),
#   r  -- polynomial degree (1 <= r <= 4),
#   nu -- a length-n multi-index tuple (with |nu| <= r) identifying one
#         Lagrange basis polynomial of the degree-r Lagrange basis.
# Each dictionary value is the list of coefficients of that basis polynomial
# expanded in a basis of the space of polynomials of degree <= r on R^n; the
# list length therefore equals binomial(n + r, n) (e.g. 6 for n = 2, r = 2,
# and 70 for n = 4, r = 4), which matches every entry below.
# NOTE(review): the exact ordering of the underlying monomial-type basis is
# not evident from this file alone -- confirm against the code that generates
# and consumes this cache before relying on the coefficient positions.
lagrange_basis_coefficients_cache = [
    # n = 1
    [
        # r = 1
        {
            (0,): [1, -1],
            (1,): [0, 1],
        },
        # r = 2
        {
            (0,): [1, -3, 2],
            (1,): [0, 4, -4],
            (2,): [0, -1, 2],
        },
        # r = 3
        {
            (0,): [1, -11 / 2, 9, -9 / 2],
            (1,): [0, 9, -45 / 2, 27 / 2],
            (2,): [0, -9 / 2, 18, -27 / 2],
            (3,): [0, 1, -9 / 2, 9 / 2],
        },
        # r = 4
        {
            (0,): [1, -25 / 3, 70 / 3, -80 / 3, 32 / 3],
            (1,): [0, 16, -208 / 3, 96, -128 / 3],
            (2,): [0, -12, 76, -128, 64],
            (3,): [0, 16 / 3, -112 / 3, 224 / 3, -128 / 3],
            (4,): [0, -1, 22 / 3, -16, 32 / 3],
        },
    ],
    # n = 2
    [
        # r = 1
        {
            (0, 0): [1, -1, -1],
            (1, 0): [0, 1, 0],
            (0, 1): [0, 0, 1],
        },
        # r = 2
        {
            (0, 0): [1, -3, 2, -3, 4, 2],
            (1, 0): [0, 4, -4, 0, -4, 0],
            (2, 0): [0, -1, 2, 0, 0, 0],
            (0, 1): [0, 0, 0, 4, -4, -4],
            (1, 1): [0, 0, 0, 0, 4, 0],
            (0, 2): [0, 0, 0, -1, 0, 2],
        },
        # r = 3
        {
            (0, 0): [1, -11 / 2, 9, -9 / 2, -11 / 2, 18, -27 / 2, 9, -27 / 2, -9 / 2],
            (1, 0): [0, 9, -45 / 2, 27 / 2, 0, -45 / 2, 27, 0, 27 / 2, 0],
            (2, 0): [0, -9 / 2, 18, -27 / 2, 0, 9 / 2, -27 / 2, 0, 0, 0],
            (3, 0): [0, 1, -9 / 2, 9 / 2, 0, 0, 0, 0, 0, 0],
            (0, 1): [0, 0, 0, 0, 9, -45 / 2, 27 / 2, -45 / 2, 27, 27 / 2],
            (1, 1): [0, 0, 0, 0, 0, 27, -27, 0, -27, 0],
            (2, 1): [0, 0, 0, 0, 0, -9 / 2, 27 / 2, 0, 0, 0],
            (0, 2): [0, 0, 0, 0, -9 / 2, 9 / 2, 0, 18, -27 / 2, -27 / 2],
            (1, 2): [0, 0, 0, 0, 0, -9 / 2, 0, 0, 27 / 2, 0],
            (0, 3): [0, 0, 0, 0, 1, 0, 0, -9 / 2, 0, 9 / 2],
        },
        # r = 4
        {
            (0, 0): [1, -25 / 3, 70 / 3, -80 / 3, 32 / 3, -25 / 3, 140 / 3, -80, 128 / 3, 70 / 3, -80, 64, -80 / 3,
                     128 / 3, 32 / 3],
            (1, 0): [0, 16, -208 / 3, 96, -128 / 3, 0, -208 / 3, 192, -128, 0, 96, -128, 0, -128 / 3, 0],
            (2, 0): [0, -12, 76, -128, 64, 0, 28, -144, 128, 0, -16, 64, 0, 0, 0],
            (3, 0): [0, 16 / 3, -112 / 3, 224 / 3, -128 / 3, 0, -16 / 3, 32, -128 / 3, 0, 0, 0, 0, 0, 0],
            (4, 0): [0, -1, 22 / 3, -16, 32 / 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 1): [0, 0, 0, 0, 0, 16, -208 / 3, 96, -128 / 3, -208 / 3, 192, -128, 96, -128, -128 / 3],
            (1, 1): [0, 0, 0, 0, 0, 0, 96, -224, 128, 0, -224, 256, 0, 128, 0],
            (2, 1): [0, 0, 0, 0, 0, 0, -32, 160, -128, 0, 32, -128, 0, 0, 0],
            (3, 1): [0, 0, 0, 0, 0, 0, 16 / 3, -32, 128 / 3, 0, 0, 0, 0, 0, 0],
            (0, 2): [0, 0, 0, 0, 0, -12, 28, -16, 0, 76, -144, 64, -128, 128, 64],
            (1, 2): [0, 0, 0, 0, 0, 0, -32, 32, 0, 0, 160, -128, 0, -128, 0],
            (2, 2): [0, 0, 0, 0, 0, 0, 4, -16, 0, 0, -16, 64, 0, 0, 0],
            (0, 3): [0, 0, 0, 0, 0, 16 / 3, -16 / 3, 0, 0, -112 / 3, 32, 0, 224 / 3, -128 / 3, -128 / 3],
            (1, 3): [0, 0, 0, 0, 0, 0, 16 / 3, 0, 0, 0, -32, 0, 0, 128 / 3, 0],
            (0, 4): [0, 0, 0, 0, 0, -1, 0, 0, 0, 22 / 3, 0, 0, -16, 0, 32 / 3],
        },
    ],
    # n = 3
    [
        # r = 1
        {
            (0, 0, 0): [1, -1, -1, -1],
            (1, 0, 0): [0, 1, 0, 0],
            (0, 1, 0): [0, 0, 1, 0],
            (0, 0, 1): [0, 0, 0, 1],
        },
        # r = 2
        {
            (0, 0, 0): [1, -3, 2, -3, 4, 2, -3, 4, 4, 2],
            (1, 0, 0): [0, 4, -4, 0, -4, 0, 0, -4, 0, 0],
            (2, 0, 0): [0, -1, 2, 0, 0, 0, 0, 0, 0, 0],
            (0, 1, 0): [0, 0, 0, 4, -4, -4, 0, 0, -4, 0],
            (1, 1, 0): [0, 0, 0, 0, 4, 0, 0, 0, 0, 0],
            (0, 2, 0): [0, 0, 0, -1, 0, 2, 0, 0, 0, 0],
            (0, 0, 1): [0, 0, 0, 0, 0, 0, 4, -4, -4, -4],
            (1, 0, 1): [0, 0, 0, 0, 0, 0, 0, 4, 0, 0],
            (0, 1, 1): [0, 0, 0, 0, 0, 0, 0, 0, 4, 0],
            (0, 0, 2): [0, 0, 0, 0, 0, 0, -1, 0, 0, 2],
        },
        # r = 3
        {
            (0, 0, 0): [1, -11 / 2, 9, -9 / 2, -11 / 2, 18, -27 / 2, 9, -27 / 2, -9 / 2, -11 / 2, 18, -27 / 2, 18, -27,
                        -27 / 2, 9, -27 / 2, -27 / 2, -9 / 2],
            (1, 0, 0): [0, 9, -45 / 2, 27 / 2, 0, -45 / 2, 27, 0, 27 / 2, 0, 0, -45 / 2, 27, 0, 27, 0, 0, 27 / 2, 0, 0],
            (2, 0, 0): [0, -9 / 2, 18, -27 / 2, 0, 9 / 2, -27 / 2, 0, 0, 0, 0, 9 / 2, -27 / 2, 0, 0, 0, 0, 0, 0, 0],
            (3, 0, 0): [0, 1, -9 / 2, 9 / 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 1, 0): [0, 0, 0, 0, 9, -45 / 2, 27 / 2, -45 / 2, 27, 27 / 2, 0, 0, 0, -45 / 2, 27, 27, 0, 0, 27 / 2, 0],
            (1, 1, 0): [0, 0, 0, 0, 0, 27, -27, 0, -27, 0, 0, 0, 0, 0, -27, 0, 0, 0, 0, 0],
            (2, 1, 0): [0, 0, 0, 0, 0, -9 / 2, 27 / 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 2, 0): [0, 0, 0, 0, -9 / 2, 9 / 2, 0, 18, -27 / 2, -27 / 2, 0, 0, 0, 9 / 2, 0, -27 / 2, 0, 0, 0, 0],
            (1, 2, 0): [0, 0, 0, 0, 0, -9 / 2, 0, 0, 27 / 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 3, 0): [0, 0, 0, 0, 1, 0, 0, -9 / 2, 0, 9 / 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 0, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, -45 / 2, 27 / 2, -45 / 2, 27, 27 / 2, -45 / 2, 27, 27, 27 / 2],
            (1, 0, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, -27, 0, -27, 0, 0, -27, 0, 0],
            (2, 0, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -9 / 2, 27 / 2, 0, 0, 0, 0, 0, 0, 0],
            (0, 1, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, -27, -27, 0, 0, -27, 0],
            (1, 1, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 0, 0, 0, 0, 0],
            (0, 2, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -9 / 2, 0, 27 / 2, 0, 0, 0, 0],
            (0, 0, 2): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -9 / 2, 9 / 2, 0, 9 / 2, 0, 0, 18, -27 / 2, -27 / 2, -27 / 2],
            (1, 0, 2): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -9 / 2, 0, 0, 0, 0, 0, 27 / 2, 0, 0],
            (0, 1, 2): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -9 / 2, 0, 0, 0, 0, 27 / 2, 0],
            (0, 0, 3): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, -9 / 2, 0, 0, 9 / 2],
        },
        # r = 4
        {
            (0, 0, 0): [1, -25 / 3, 70 / 3, -80 / 3, 32 / 3, -25 / 3, 140 / 3, -80, 128 / 3, 70 / 3, -80, 64, -80 / 3,
                        128 / 3, 32 / 3, -25 / 3, 140 / 3, -80, 128 / 3, 140 / 3, -160, 128, -80, 128, 128 / 3, 70 / 3,
                        -80, 64, -80, 128, 64, -80 / 3, 128 / 3, 128 / 3, 32 / 3],
            (1, 0, 0): [0, 16, -208 / 3, 96, -128 / 3, 0, -208 / 3, 192, -128, 0, 96, -128, 0, -128 / 3, 0, 0, -208 / 3,
                        192, -128, 0, 192, -256, 0, -128, 0, 0, 96, -128, 0, -128, 0, 0, -128 / 3, 0, 0],
            (2, 0, 0): [0, -12, 76, -128, 64, 0, 28, -144, 128, 0, -16, 64, 0, 0, 0, 0, 28, -144, 128, 0, -32, 128,
                        0, 0, 0, 0, -16, 64, 0, 0, 0, 0, 0, 0, 0],
            (3, 0, 0): [0, 16 / 3, -112 / 3, 224 / 3, -128 / 3, 0, -16 / 3, 32, -128 / 3, 0, 0, 0, 0, 0, 0, 0, -16 / 3,
                        32, -128 / 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (4, 0, 0): [0, -1, 22 / 3, -16, 32 / 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                        0, 0, 0, 0, 0, 0, 0, 0],
            (0, 1, 0): [0, 0, 0, 0, 0, 16, -208 / 3, 96, -128 / 3, -208 / 3, 192, -128, 96, -128, -128 / 3, 0, 0, 0, 0,
                        -208 / 3, 192, -128, 192, -256, -128, 0, 0, 0, 96, -128, -128, 0, 0, -128 / 3, 0],
            (1, 1, 0): [0, 0, 0, 0, 0, 0, 96, -224, 128, 0, -224, 256, 0, 128, 0, 0, 0, 0, 0, 0, -224, 256, 0, 256, 0,
                        0, 0, 0, 0, 128, 0, 0, 0, 0, 0],
            (2, 1, 0): [0, 0, 0, 0, 0, 0, -32, 160, -128, 0, 32, -128, 0, 0, 0, 0, 0, 0, 0, 0, 32, -128, 0, 0, 0, 0, 0,
                        0, 0, 0, 0, 0, 0, 0, 0],
            (3, 1, 0): [0, 0, 0, 0, 0, 0, 16 / 3, -32, 128 / 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                        0, 0, 0, 0, 0, 0, 0],
            (0, 2, 0): [0, 0, 0, 0, 0, -12, 28, -16, 0, 76, -144, 64, -128, 128, 64, 0, 0, 0, 0, 28, -32, 0, -144, 128,
                        128, 0, 0, 0, -16, 0, 64, 0, 0, 0, 0],
            (1, 2, 0): [0, 0, 0, 0, 0, 0, -32, 32, 0, 0, 160, -128, 0, -128, 0, 0, 0, 0, 0, 0, 32, 0, 0, -128,
                        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (2, 2, 0): [0, 0, 0, 0, 0, 0, 4, -16, 0, 0, -16, 64, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                        0, 0, 0, 0, 0],
            (0, 3, 0): [0, 0, 0, 0, 0, 16 / 3, -16 / 3, 0, 0, -112 / 3, 32, 0, 224 / 3, -128 / 3, -128 / 3, 0, 0, 0, 0,
                        -16 / 3, 0, 0, 32, 0, -128 / 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (1, 3, 0): [0, 0, 0, 0, 0, 0, 16 / 3, 0, 0, 0, -32, 0, 0, 128 / 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                        0, 0, 0, 0, 0, 0, 0],
            (0, 4, 0): [0, 0, 0, 0, 0, -1, 0, 0, 0, 22 / 3, 0, 0, -16, 0, 32 / 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                        0, 0, 0, 0, 0, 0, 0],
            (0, 0, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, -208 / 3, 96, -128 / 3, -208 / 3, 192, -128,
                        96, -128, -128 / 3, -208 / 3, 192, -128, 192, -256, -128, 96, -128, -128, -128 / 3],
            (1, 0, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 96, -224, 128, 0, -224, 256, 0, 128, 0, 0, -224,
                        256, 0, 256, 0, 0, 128, 0, 0],
            (2, 0, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 160, -128, 0, 32, -128, 0, 0, 0, 0, 32,
                        -128, 0, 0, 0, 0, 0, 0, 0],
            (3, 0, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16 / 3, -32, 128 / 3, 0, 0, 0, 0, 0, 0, 0, 0,
                        0, 0, 0, 0, 0, 0, 0, 0],
            (0, 1, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 96, -224, 128, -224, 256, 128, 0, 0,
                        0, -224, 256, 256, 0, 0, 128, 0],
            (1, 1, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 256, -256, 0, -256, 0, 0, 0, 0, 0,
                        -256, 0, 0, 0, 0, 0],
            (2, 1, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 128, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                        0, 0, 0, 0],
            (0, 2, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 32, 0, 160, -128, -128, 0, 0, 0,
                        32, 0, -128, 0, 0, 0, 0],
            (1, 2, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 0, 0, 128, 0, 0, 0, 0, 0, 0, 0,
                        0, 0, 0, 0],
            (0, 3, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16 / 3, 0, 0, -32, 0, 128 / 3, 0, 0, 0,
                        0, 0, 0, 0, 0, 0, 0],
            (0, 0, 2): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -12, 28, -16, 0, 28, -32, 0, -16, 0, 0, 76, -144,
                        64, -144, 128, 64, -128, 128, 128, 64],
            (1, 0, 2): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 32, 0, 0, 32, 0, 0, 0, 0, 0, 160, -128, 0,
                        -128, 0, 0, -128, 0, 0],
            (2, 0, 2): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, -16, 0, 0, 0, 0, 0, 0, 0, 0, -16, 64, 0, 0,
                        0, 0, 0, 0, 0],
            (0, 1, 2): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 32, 0, 32, 0, 0, 0, 0, 0, 160,
                        -128, -128, 0, 0, -128, 0],
            (1, 1, 2): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 0, 0, 0, 0, 0, 0, 0, 0, 128,
                        0, 0, 0, 0, 0],
            (0, 2, 2): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, -16, 0, 0, 0, 0, 0, -16, 0,
                        64, 0, 0, 0, 0],
            (0, 0, 3): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16 / 3, -16 / 3, 0, 0, -16 / 3, 0, 0, 0, 0, 0,
                        -112 / 3, 32, 0, 32, 0, 0, 224 / 3, -128 / 3, -128 / 3, -128 / 3],
            (1, 0, 3): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16 / 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 0, 0,
                        0, 0, 0, 128 / 3, 0, 0],
            (0, 1, 3): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16 / 3, 0, 0, 0, 0, 0, 0, 0, 0, -32, 0,
                        0, 0, 0, 128 / 3, 0],
            (0, 0, 4): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22 / 3, 0, 0, 0, 0,
                        0, -16, 0, 0, 32 / 3],
        },
    ],
    # n = 4
    [
        # r = 1
        {
            (0, 0, 0, 0): [1, -1, -1, -1, -1],
            (1, 0, 0, 0): [0, 1, 0, 0, 0],
            (0, 1, 0, 0): [0, 0, 1, 0, 0],
            (0, 0, 1, 0): [0, 0, 0, 1, 0],
            (0, 0, 0, 1): [0, 0, 0, 0, 1],
        },
        # r = 2
        {
            (0, 0, 0, 0): [1, -3, 2, -3, 4, 2, -3, 4, 4, 2, -3, 4, 4, 4, 2],
            (1, 0, 0, 0): [0, 4, -4, 0, -4, 0, 0, -4, 0, 0, 0, -4, 0, 0, 0],
            (2, 0, 0, 0): [0, -1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 1, 0, 0): [0, 0, 0, 4, -4, -4, 0, 0, -4, 0, 0, 0, -4, 0, 0],
            (1, 1, 0, 0): [0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 2, 0, 0): [0, 0, 0, -1, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 0, 1, 0): [0, 0, 0, 0, 0, 0, 4, -4, -4, -4, 0, 0, 0, -4, 0],
            (1, 0, 1, 0): [0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0],
            (0, 1, 1, 0): [0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0],
            (0, 0, 2, 0): [0, 0, 0, 0, 0, 0, -1, 0, 0, 2, 0, 0, 0, 0, 0],
            (0, 0, 0, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, -4, -4, -4, -4],
            (1, 0, 0, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0],
            (0, 1, 0, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0],
            (0, 0, 1, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0],
            (0, 0, 0, 2): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 2],
        },
        # r = 3
        {
            (0, 0, 0, 0): [1, -11 / 2, 9, -9 / 2, -11 / 2, 18, -27 / 2, 9, -27 / 2, -9 / 2, -11 / 2, 18, -27 / 2, 18,
                           -27, -27 / 2, 9, -27 / 2, -27 / 2, -9 / 2, -11 / 2, 18, -27 / 2, 18, -27, -27 / 2, 18, -27,
                           -27, -27 / 2, 9, -27 / 2, -27 / 2, -27 / 2, -9 / 2],
            (1, 0, 0, 0): [0, 9, -45 / 2, 27 / 2, 0, -45 / 2, 27, 0, 27 / 2, 0, 0, -45 / 2, 27, 0, 27, 0, 0, 27 / 2,
                           0, 0, 0, -45 / 2, 27, 0, 27, 0, 0, 27, 0, 0, 0, 27 / 2, 0, 0, 0],
            (2, 0, 0, 0): [0, -9 / 2, 18, -27 / 2, 0, 9 / 2, -27 / 2, 0, 0, 0, 0, 9 / 2, -27 / 2, 0, 0, 0, 0, 0, 0, 0,
                           0, 9 / 2, -27 / 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (3, 0, 0, 0): [0, 1, -9 / 2, 9 / 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0],
            (0, 1, 0, 0): [0, 0, 0, 0, 9, -45 / 2, 27 / 2, -45 / 2, 27, 27 / 2, 0, 0, 0, -45 / 2, 27, 27, 0, 0,
                           27 / 2, 0, 0, 0, 0, -45 / 2, 27, 27, 0, 0, 27, 0, 0, 0, 27 / 2, 0, 0],
            (1, 1, 0, 0): [0, 0, 0, 0, 0, 27, -27, 0, -27, 0, 0, 0, 0, 0, -27, 0, 0, 0, 0, 0, 0, 0, 0, 0, -27, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0],
            (2, 1, 0, 0): [0, 0, 0, 0, 0, -9 / 2, 27 / 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0],
            (0, 2, 0, 0): [0, 0, 0, 0, -9 / 2, 9 / 2, 0, 18, -27 / 2, -27 / 2, 0, 0, 0, 9 / 2, 0, -27 / 2, 0, 0, 0, 0,
                           0, 0, 0, 9 / 2, 0, -27 / 2, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (1, 2, 0, 0): [0, 0, 0, 0, 0, -9 / 2, 0, 0, 27 / 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0],
            (0, 3, 0, 0): [0, 0, 0, 0, 1, 0, 0, -9 / 2, 0, 9 / 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0],
            (0, 0, 1, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, -45 / 2, 27 / 2, -45 / 2, 27, 27 / 2, -45 / 2, 27, 27,
                           27 / 2, 0, 0, 0, 0, 0, 0, -45 / 2, 27, 27, 27, 0, 0, 0, 27 / 2, 0],
            (1, 0, 1, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, -27, 0, -27, 0, 0, -27, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           -27, 0, 0, 0, 0, 0, 0, 0],
            (2, 0, 1, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -9 / 2, 27 / 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0],
            (0, 1, 1, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, -27, -27, 0, 0, -27, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           -27, 0, 0, 0, 0, 0, 0],
            (1, 1, 1, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0],
            (0, 2, 1, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -9 / 2, 0, 27 / 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0],
            (0, 0, 2, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -9 / 2, 9 / 2, 0, 9 / 2, 0, 0, 18, -27 / 2, -27 / 2, -27 / 2,
                           0, 0, 0, 0, 0, 0, 9 / 2, 0, 0, -27 / 2, 0, 0, 0, 0, 0],
            (1, 0, 2, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -9 / 2, 0, 0, 0, 0, 0, 27 / 2, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0],
            (0, 1, 2, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -9 / 2, 0, 0, 0, 0, 27 / 2, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0],
            (0, 0, 3, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, -9 / 2, 0, 0, 9 / 2, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0],
            (0, 0, 0, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, -45 / 2, 27 / 2, -45 / 2,
                           27, 27 / 2, -45 / 2, 27, 27, 27 / 2, -45 / 2, 27, 27, 27, 27 / 2],
            (1, 0, 0, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, -27, 0, -27, 0, 0, -27,
                           0, 0, 0, -27, 0, 0, 0],
            (2, 0, 0, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -9 / 2, 27 / 2, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0],
            (0, 1, 0, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, -27, -27, 0, 0,
                           -27, 0, 0, 0, -27, 0, 0],
            (1, 1, 0, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0],
            (0, 2, 0, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -9 / 2, 0, 27 / 2, 0,
                           0, 0, 0, 0, 0, 0, 0, 0],
            (0, 0, 1, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, -27, -27,
                           -27, 0, 0, 0, -27, 0],
            (1, 0, 1, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 0, 0,
                           0, 0, 0, 0, 0],
            (0, 1, 1, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 0,
                           0, 0, 0, 0, 0],
            (0, 0, 2, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -9 / 2, 0, 0,
                           27 / 2, 0, 0, 0, 0, 0],
            (0, 0, 0, 2): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -9 / 2, 9 / 2, 0, 9 / 2, 0, 0,
                           9 / 2, 0, 0, 0, 18, -27 / 2, -27 / 2, -27 / 2, -27 / 2],
            (1, 0, 0, 2): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -9 / 2, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 27 / 2, 0, 0, 0],
            (0, 1, 0, 2): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -9 / 2, 0, 0, 0, 0, 0,
                           0, 0, 0, 27 / 2, 0, 0],
            (0, 0, 1, 2): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -9 / 2, 0, 0,
                           0, 0, 0, 0, 27 / 2, 0],
            (0, 0, 0, 3): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           -9 / 2, 0, 0, 0, 9 / 2],
        },
        # r = 4
        {
            (0, 0, 0, 0): [1, -25 / 3, 70 / 3, -80 / 3, 32 / 3, -25 / 3, 140 / 3, -80, 128 / 3, 70 / 3, -80, 64,
                           -80 / 3, 128 / 3, 32 / 3, -25 / 3, 140 / 3, -80, 128 / 3, 140 / 3, -160, 128, -80, 128,
                           128 / 3, 70 / 3, -80, 64, -80, 128, 64, -80 / 3, 128 / 3, 128 / 3, 32 / 3, -25 / 3, 140 / 3,
                           -80, 128 / 3, 140 / 3, -160, 128, -80, 128, 128 / 3, 140 / 3, -160, 128, -160, 256, 128,
                           -80, 128, 128, 128 / 3, 70 / 3, -80, 64, -80, 128, 64, -80, 128, 128, 64, -80 / 3, 128 / 3,
                           128 / 3, 128 / 3, 32 / 3],
            (1, 0, 0, 0): [0, 16, -208 / 3, 96, -128 / 3, 0, -208 / 3, 192, -128, 0, 96, -128, 0, -128 / 3, 0, 0,
                           -208 / 3, 192, -128, 0, 192, -256, 0, -128, 0, 0, 96, -128, 0, -128, 0, 0, -128 / 3, 0, 0,
                           0, -208 / 3, 192, -128, 0, 192, -256, 0, -128, 0, 0, 192, -256, 0, -256, 0, 0, -128, 0, 0,
                           0, 96, -128, 0, -128, 0, 0, -128, 0, 0, 0, -128 / 3, 0, 0, 0],
            (2, 0, 0, 0): [0, -12, 76, -128, 64, 0, 28, -144, 128, 0, -16, 64, 0, 0, 0, 0, 28, -144, 128, 0, -32, 128,
                           0, 0, 0, 0, -16, 64, 0, 0, 0, 0, 0, 0, 0, 0, 28, -144, 128, 0, -32, 128, 0, 0, 0, 0, -32,
                           128, 0, 0, 0, 0, 0, 0, 0, 0, -16, 64, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (3, 0, 0, 0): [0, 16 / 3, -112 / 3, 224 / 3, -128 / 3, 0, -16 / 3, 32, -128 / 3, 0, 0, 0, 0, 0, 0, 0,
                           -16 / 3, 32, -128 / 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -16 / 3, 32,
                           -128 / 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0],
            (4, 0, 0, 0): [0, -1, 22 / 3, -16, 32 / 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 1, 0, 0): [0, 0, 0, 0, 0, 16, -208 / 3, 96, -128 / 3, -208 / 3, 192, -128, 96, -128, -128 / 3, 0, 0,
                           0, 0, -208 / 3, 192, -128, 192, -256, -128, 0, 0, 0, 96, -128, -128, 0, 0, -128 / 3, 0, 0,
                           0, 0, 0, -208 / 3, 192, -128, 192, -256, -128, 0, 0, 0, 192, -256, -256, 0, 0, -128, 0, 0,
                           0, 0, 96, -128, -128, 0, 0, -128, 0, 0, 0, -128 / 3, 0, 0],
            (1, 1, 0, 0): [0, 0, 0, 0, 0, 0, 96, -224, 128, 0, -224, 256, 0, 128, 0, 0, 0, 0, 0, 0, -224, 256, 0, 256,
                           0, 0, 0, 0, 0, 128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -224, 256, 0, 256, 0, 0, 0, 0, 0, 256, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (2, 1, 0, 0): [0, 0, 0, 0, 0, 0, -32, 160, -128, 0, 32, -128, 0, 0, 0, 0, 0, 0, 0, 0, 32, -128, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32, -128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (3, 1, 0, 0): [0, 0, 0, 0, 0, 0, 16 / 3, -32, 128 / 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 2, 0, 0): [0, 0, 0, 0, 0, -12, 28, -16, 0, 76, -144, 64, -128, 128, 64, 0, 0, 0, 0, 28, -32, 0, -144,
                           128, 128, 0, 0, 0, -16, 0, 64, 0, 0, 0, 0, 0, 0, 0, 0, 28, -32, 0, -144, 128, 128, 0, 0, 0,
                           -32, 0, 128, 0, 0, 0, 0, 0, 0, 0, -16, 0, 64, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (1, 2, 0, 0): [0, 0, 0, 0, 0, 0, -32, 32, 0, 0, 160, -128, 0, -128, 0, 0, 0, 0, 0, 0, 32, 0, 0, -128, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32, 0, 0, -128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (2, 2, 0, 0): [0, 0, 0, 0, 0, 0, 4, -16, 0, 0, -16, 64, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 3, 0, 0): [0, 0, 0, 0, 0, 16 / 3, -16 / 3, 0, 0, -112 / 3, 32, 0, 224 / 3, -128 / 3, -128 / 3, 0, 0,
                           0, 0, -16 / 3, 0, 0, 32, 0, -128 / 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -16 / 3, 0,
                           0, 32, 0, -128 / 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0],
            (1, 3, 0, 0): [0, 0, 0, 0, 0, 0, 16 / 3, 0, 0, 0, -32, 0, 0, 128 / 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 4, 0, 0): [0, 0, 0, 0, 0, -1, 0, 0, 0, 22 / 3, 0, 0, -16, 0, 32 / 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 0, 1, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, -208 / 3, 96, -128 / 3, -208 / 3, 192,
                           -128, 96, -128, -128 / 3, -208 / 3, 192, -128, 192, -256, -128, 96, -128, -128, -128 / 3, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, -208 / 3, 192, -128, 192, -256, -128, 192, -256, -256, -128, 0,
                           0, 0, 0, 0, 0, 96, -128, -128, -128, 0, 0, 0, -128 / 3, 0],
            (1, 0, 1, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 96, -224, 128, 0, -224, 256, 0, 128, 0, 0,
                           -224, 256, 0, 256, 0, 0, 128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -224, 256, 0, 256, 0,
                           0, 256, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 0, 0, 0, 0, 0, 0, 0],
            (2, 0, 1, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 160, -128, 0, 32, -128, 0, 0, 0, 0,
                           32, -128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32, -128, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (3, 0, 1, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16 / 3, -32, 128 / 3, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 1, 1, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 96, -224, 128, -224, 256, 128, 0,
                           0, 0, -224, 256, 256, 0, 0, 128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -224, 256, 256,
                           0, 0, 256, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 0, 0, 0, 0, 0, 0],
            (1, 1, 1, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 256, -256, 0, -256, 0, 0, 0, 0,
                           0, -256, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -256, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (2, 1, 1, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 128, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 2, 1, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 32, 0, 160, -128, -128, 0, 0,
                           0, 32, 0, -128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32, 0, -128, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (1, 2, 1, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 0, 0, 128, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 3, 1, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16 / 3, 0, 0, -32, 0, 128 / 3, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 0, 2, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -12, 28, -16, 0, 28, -32, 0, -16, 0, 0, 76,
                           -144, 64, -144, 128, 64, -128, 128, 128, 64, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, -32, 0, -32,
                           0, 0, -144, 128, 128, 128, 0, 0, 0, 0, 0, 0, -16, 0, 0, 64, 0, 0, 0, 0, 0],
            (1, 0, 2, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 32, 0, 0, 32, 0, 0, 0, 0, 0, 160,
                           -128, 0, -128, 0, 0, -128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32, 0, 0, 0, 0, 0, -128,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (2, 0, 2, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, -16, 0, 0, 0, 0, 0, 0, 0, 0, -16, 64, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 1, 2, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 32, 0, 32, 0, 0, 0, 0, 0,
                           160, -128, -128, 0, 0, -128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32, 0, 0, 0, 0, -128,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (1, 1, 2, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 0, 0, 0, 0, 0, 0, 0, 0,
                           128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 2, 2, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, -16, 0, 0, 0, 0, 0, -16,
                           0, 64, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 0, 3, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16 / 3, -16 / 3, 0, 0, -16 / 3, 0, 0, 0, 0, 0,
                           -112 / 3, 32, 0, 32, 0, 0, 224 / 3, -128 / 3, -128 / 3, -128 / 3, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, -16 / 3, 0, 0, 0, 0, 0, 32, 0, 0, -128 / 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (1, 0, 3, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16 / 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 0,
                           0, 0, 0, 0, 128 / 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 1, 3, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16 / 3, 0, 0, 0, 0, 0, 0, 0, 0,
                           -32, 0, 0, 0, 0, 128 / 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 0, 4, 0): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22 / 3, 0, 0,
                           0, 0, 0, -16, 0, 0, 32 / 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 0, 0, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 16, -208 / 3, 96, -128 / 3, -208 / 3, 192, -128, 96, -128, -128 / 3,
                           -208 / 3, 192, -128, 192, -256, -128, 96, -128, -128, -128 / 3, -208 / 3, 192, -128, 192,
                           -256, -128, 192, -256, -256, -128, 96, -128, -128, -128, -128 / 3],
            (1, 0, 0, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 96, -224, 128, 0, -224, 256, 0, 128, 0, 0, -224, 256, 0, 256, 0, 0, 128,
                           0, 0, 0, -224, 256, 0, 256, 0, 0, 256, 0, 0, 0, 128, 0, 0, 0],
            (2, 0, 0, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, -32, 160, -128, 0, 32, -128, 0, 0, 0, 0, 32, -128, 0, 0, 0, 0, 0, 0, 0, 0,
                           32, -128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (3, 0, 0, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 16 / 3, -32, 128 / 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 1, 0, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 96, -224, 128, -224, 256, 128, 0, 0, 0, -224, 256, 256, 0, 0,
                           128, 0, 0, 0, 0, -224, 256, 256, 0, 0, 256, 0, 0, 0, 128, 0, 0],
            (1, 1, 0, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 256, -256, 0, -256, 0, 0, 0, 0, 0, -256, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, -256, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (2, 1, 0, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 2, 0, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 32, 0, 160, -128, -128, 0, 0, 0, 32, 0, -128, 0, 0, 0, 0, 0,
                           0, 0, 32, 0, -128, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (1, 2, 0, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 0, 0, 128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 3, 0, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 16 / 3, 0, 0, -32, 0, 128 / 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 0, 1, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 96, -224, 128, -224, 256, 128, -224, 256, 256,
                           128, 0, 0, 0, 0, 0, 0, -224, 256, 256, 256, 0, 0, 0, 128, 0],
            (1, 0, 1, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 256, -256, 0, -256, 0, 0, -256, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, -256, 0, 0, 0, 0, 0, 0, 0],
            (2, 0, 1, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 1, 1, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 256, -256, -256, 0, 0, -256, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, -256, 0, 0, 0, 0, 0, 0],
            (1, 1, 1, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 256, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 2, 1, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 0, 128, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 0, 2, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 32, 0, 32, 0, 0, 160, -128, -128, -128, 0,
                           0, 0, 0, 0, 0, 32, 0, 0, -128, 0, 0, 0, 0, 0],
            (1, 0, 2, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 0, 0, 0, 0, 0, 128, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 1, 2, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 0, 0, 0, 0, 128, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 0, 3, 1): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16 / 3, 0, 0, 0, 0, 0, -32, 0, 0, 128 / 3, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 0, 0, 2): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, -12, 28, -16, 0, 28, -32, 0, -16, 0, 0, 28, -32, 0, -32, 0, 0, -16, 0, 0, 0,
                           76, -144, 64, -144, 128, 64, -144, 128, 128, 64, -128, 128, 128, 128, 64],
            (1, 0, 0, 2): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, -32, 32, 0, 0, 32, 0, 0, 0, 0, 0, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 160,
                           -128, 0, -128, 0, 0, -128, 0, 0, 0, -128, 0, 0, 0],
            (2, 0, 0, 2): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 4, -16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -16, 64, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 1, 0, 2): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 32, 0, 32, 0, 0, 0, 0, 0, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           160, -128, -128, 0, 0, -128, 0, 0, 0, -128, 0, 0],
            (1, 1, 0, 2): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 2, 0, 2): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, -16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -16,
                           0, 64, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            (0, 0, 1, 2): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 32, 0, 32, 0, 0, 32, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 160, -128, -128, -128, 0, 0, 0, -128, 0],
            (1, 0, 1, 2): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 128, 0, 0, 0, 0, 0, 0, 0],
            (0, 1, 1, 2): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 128, 0, 0, 0, 0, 0, 0],
            (0, 0, 2, 2): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, -16, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, -16, 0, 0, 64, 0, 0, 0, 0, 0],
            (0, 0, 0, 3): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 16 / 3, -16 / 3, 0, 0, -16 / 3, 0, 0, 0, 0, 0, -16 / 3, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, -112 / 3, 32, 0, 32, 0, 0, 32, 0, 0, 0, 224 / 3, -128 / 3, -128 / 3, -128 / 3,
                           -128 / 3],
            (1, 0, 0, 3): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 16 / 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -32, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 128 / 3, 0, 0, 0],
            (0, 1, 0, 3): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 16 / 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           -32, 0, 0, 0, 0, 0, 0, 0, 0, 128 / 3, 0, 0],
            (0, 0, 1, 3): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16 / 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, -32, 0, 0, 0, 0, 0, 0, 128 / 3, 0],
            (0, 0, 0, 4): [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                           0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22 / 3, 0, 0, 0,
                           0, 0, 0, 0, 0, 0, -16, 0, 0, 0, 32 / 3],
        },
    ],
]
| 81.277674
| 120
| 0.274855
| 9,875
| 43,321
| 1.205468
| 0.004557
| 1.078293
| 1.438004
| 1.732191
| 0.985467
| 0.981939
| 0.974882
| 0.96707
| 0.953965
| 0.941616
| 0
| 0.499068
| 0.455091
| 43,321
| 532
| 121
| 81.430451
| 0.005211
| 0.004755
| 0
| 0.147638
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
aebbeaa149a6075e6b2b08f918171bd6a478ac68
| 1,377
|
py
|
Python
|
Sets.py
|
krishnakesari/Python-Fund
|
81199325630c41b288f32ecdde6a90919edd9f4b
|
[
"MIT"
] | null | null | null |
Sets.py
|
krishnakesari/Python-Fund
|
81199325630c41b288f32ecdde6a90919edd9f4b
|
[
"MIT"
] | null | null | null |
Sets.py
|
krishnakesari/Python-Fund
|
81199325630c41b288f32ecdde6a90919edd9f4b
|
[
"MIT"
] | null | null | null |
def format_set(members, label=''):
    """Render *members* in the script's ``label{ x y z }`` display format.

    Produces exactly the text the original per-section ``print_set`` helpers
    printed: an optional label, ``{``, each member separated by single spaces,
    then ``}``.
    """
    parts = [label + '{'] + [str(x) for x in members] + ['}']
    return ' '.join(parts)


def print_set(members, label=''):
    """Print *members* using :func:`format_set`."""
    print(format_set(members, label))


def main():
    """Demonstrate Python set operations on two character sets.

    Fix: the original script redefined ``main``/``print_set`` five times,
    each redefinition shadowing the previous one; the demonstrations are
    consolidated here in the original order with identical output labels.
    """
    a = set("I am fine")
    b = set("I am ok")
    # Sorted element listings of each set.
    print_set(sorted(a))
    print_set(sorted(b))
    # Members in set a but not b (difference).
    print_set(a - b, 'Members with a but not b')
    # Members in set a or b or both (union).
    print_set(a | b, 'Members with a or b or both: ')
    # Members in set a or b but not both (symmetric difference).
    print_set(a ^ b, 'Members with a or b but not both: ')
    # Members in both set a and b (intersection).
    print_set(a & b, 'Members with both a and b are: ')


if __name__ == '__main__':
    main()
| 18.863014
| 60
| 0.526507
| 239
| 1,377
| 2.820084
| 0.108787
| 0.130564
| 0.089021
| 0.081602
| 0.893175
| 0.867953
| 0.841246
| 0.841246
| 0.841246
| 0.796736
| 0
| 0
| 0.293391
| 1,377
| 72
| 61
| 19.125
| 0.692703
| 0.104575
| 0
| 0.76087
| 0
| 0
| 0.213235
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.217391
| false
| 0
| 0
| 0
| 0.217391
| 0.565217
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
aecba7a8fcc8f2ab59bc0965d725ff03f75b15c1
| 5,447
|
py
|
Python
|
lookahead/runners/portfolio_runners.py
|
ericlee0803/lookahead_release
|
373295f11be81d82b1c69eeadeec32ae96f26b1f
|
[
"MIT"
] | 3
|
2020-06-17T20:25:12.000Z
|
2020-11-24T17:21:59.000Z
|
lookahead/runners/portfolio_runners.py
|
ericlee0803/lookahead_release
|
373295f11be81d82b1c69eeadeec32ae96f26b1f
|
[
"MIT"
] | null | null | null |
lookahead/runners/portfolio_runners.py
|
ericlee0803/lookahead_release
|
373295f11be81d82b1c69eeadeec32ae96f26b1f
|
[
"MIT"
] | null | null | null |
import numpy as np
from lookahead.runners.bayesian_optimization import BayesianOptimization
from lookahead.acquisitions.rollout_portfolio import RolloutPortfolio, RolloutPortfolioEI
from lookahead.model.gaussian_process import GaussianProcessSimple as GaussianProcess
import os
import csv
class PortfolioRunner(BayesianOptimization):
    """
    A runner class for Portfolio, Diagnostic because it outputs auxillary data.
    """

    def __init__(self, search_space, horizon):
        """Store the rollout horizon and derive the optimizer name from it."""
        super().__init__(search_space)
        self.horizon = horizon
        self.opt_name = 'portfolio' + str(horizon)

    def run(self, f, seed, budget_minus_initialization, initialization_duration=5):
        """Run Bayesian optimization on objective ``f``.

        Fix: ``initialization_duration`` was previously ignored — the warm
        start always drew a hard-coded 5 points; it now honors the parameter
        (default 5 keeps the old behavior).
        """
        acquisition_chosen_all = []
        # Warm start with `initialization_duration` points, with fixed random seed
        np.random.seed(seed)
        d = len(self.search_space.domain_bounds)
        xhist = np.random.rand(initialization_duration, d)
        yhist = f(xhist)
        self.gaussian_process = GaussianProcess(xhist, yhist)
        self.gaussian_process.train()
        while budget_minus_initialization > 0:
            # Get next sample point
            xsample, acquisition_chosen = self.get_next_point()
            acquisition_chosen_all.append(acquisition_chosen)
            ysample = f(xsample)
            xhist = np.vstack((xhist, xsample))
            yhist = np.append(yhist, ysample)
            # Refit the GP on the augmented history after every evaluation.
            self.gaussian_process = GaussianProcess(xhist, yhist)
            self.gaussian_process.train()
            budget_minus_initialization -= 1
        xhist, yhist = self.gaussian_process.get_historical_data()
        # Save BOTH acquisitions chosen as well as run information
        self.save_auxillary_data(acquisition_chosen_all, str(f.__name__), seed)
        self.save_bo_run(yhist, str(f.__name__), seed)

    def get_next_point(self):
        """Pick the next sample point via the rollout portfolio acquisition."""
        pr = RolloutPortfolio(self.gaussian_process, self.search_space, self.horizon)
        return pr.next_point()

    def save_auxillary_data(self, acquisition_chosen_all, objective_name, seed):
        """
        Saves run to the folder ~/Look-Ahead/results/optimizer_name/objective_name/seed.csv
        """
        # Fixes: docstring moved to its proper position, path building uses
        # os.path.join, makedirs(exist_ok=True) replaces the race-prone
        # exists()/makedirs() chain, and the csv file is opened with
        # newline='' as the csv module requires.
        path = os.path.join(os.path.expanduser('~'), 'Look-Ahead', 'results',
                            self.opt_name, objective_name)
        os.makedirs(path, exist_ok=True)
        run_name = os.path.join(path, 'portfolio_aux' + str(seed) + '.csv')
        # Save data as csv to path (one chosen acquisition per line).
        with open(run_name, 'w', newline='') as file:
            writer = csv.writer(file, delimiter='\n')
            writer.writerow(acquisition_chosen_all)
class PortfolioEIRunner(BayesianOptimization):
    """
    A runner class for Portfolio, Diagnostic because it outputs auxillary data.
    """

    def __init__(self, search_space, horizon):
        """Store the rollout horizon and derive the optimizer name from it."""
        super().__init__(search_space)
        self.horizon = horizon
        self.opt_name = 'portfolio_ei' + str(horizon)

    def run(self, f, seed, budget_minus_initialization, initialization_duration=5):
        """Run Bayesian optimization on objective ``f``.

        Fix: ``initialization_duration`` was previously ignored — the warm
        start always drew a hard-coded 5 points; it now honors the parameter
        (default 5 keeps the old behavior).
        """
        acquisition_chosen_all = []
        # Warm start with `initialization_duration` points, with fixed random seed
        np.random.seed(seed)
        d = len(self.search_space.domain_bounds)
        xhist = np.random.rand(initialization_duration, d)
        yhist = f(xhist)
        self.gaussian_process = GaussianProcess(xhist, yhist)
        self.gaussian_process.train()
        while budget_minus_initialization > 0:
            # Get next sample point
            xsample, acquisition_chosen = self.get_next_point()
            acquisition_chosen_all.append(acquisition_chosen)
            ysample = f(xsample)
            xhist = np.vstack((xhist, xsample))
            yhist = np.append(yhist, ysample)
            # Refit the GP on the augmented history after every evaluation.
            self.gaussian_process = GaussianProcess(xhist, yhist)
            self.gaussian_process.train()
            budget_minus_initialization -= 1
        # Save BOTH acquisitions chosen as well as run information
        xhist, yhist = self.gaussian_process.get_historical_data()
        self.save_auxillary_data(acquisition_chosen_all, str(f.__name__), seed)
        self.save_bo_run(yhist, str(f.__name__), seed)

    def get_next_point(self):
        """Pick the next sample point via the EI-based rollout portfolio."""
        pr = RolloutPortfolioEI(self.gaussian_process, self.search_space, self.horizon)
        return pr.next_point()

    def save_auxillary_data(self, acquisition_chosen_all, objective_name, seed):
        """
        Saves run to the folder ~/Look-Ahead/results/optimizer_name/objective_name/seed.csv
        """
        # Fixes: docstring moved to its proper position, path building uses
        # os.path.join, makedirs(exist_ok=True) replaces the race-prone
        # exists()/makedirs() chain, and the csv file is opened with
        # newline='' as the csv module requires.
        path = os.path.join(os.path.expanduser('~'), 'Look-Ahead', 'results',
                            self.opt_name, objective_name)
        os.makedirs(path, exist_ok=True)
        run_name = os.path.join(path, 'portfolio_aux' + str(seed) + '.csv')
        # Save data as csv to path (one chosen acquisition per line).
        with open(run_name, 'w', newline='') as file:
            writer = csv.writer(file, delimiter='\n')
            writer.writerow(acquisition_chosen_all)
| 38.359155
| 91
| 0.651001
| 651
| 5,447
| 5.219662
| 0.185868
| 0.070041
| 0.067098
| 0.038846
| 0.901707
| 0.901707
| 0.901707
| 0.901707
| 0.874632
| 0.84638
| 0
| 0.002466
| 0.255554
| 5,447
| 141
| 92
| 38.631206
| 0.835512
| 0.11052
| 0
| 0.869565
| 0
| 0
| 0.023317
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.086957
| false
| 0
| 0.065217
| 0
| 0.195652
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9dc71914ec02b2c23d3ca4aa86a26a4e1fe02e9e
| 22,984
|
py
|
Python
|
web/transiq/restapi/serializers/file_upload.py
|
manibhushan05/transiq
|
763fafb271ce07d13ac8ce575f2fee653cf39343
|
[
"Apache-2.0"
] | null | null | null |
web/transiq/restapi/serializers/file_upload.py
|
manibhushan05/transiq
|
763fafb271ce07d13ac8ce575f2fee653cf39343
|
[
"Apache-2.0"
] | 14
|
2020-06-05T23:06:45.000Z
|
2022-03-12T00:00:18.000Z
|
web/transiq/restapi/serializers/file_upload.py
|
manibhushan05/transiq
|
763fafb271ce07d13ac8ce575f2fee653cf39343
|
[
"Apache-2.0"
] | null | null | null |
from datetime import datetime
from django.contrib.auth.models import User
from rest_framework import serializers, ISO_8601
from rest_framework.validators import UniqueValidator, UniqueTogetherValidator
from api import s3util
from api.models import S3Upload
from api.utils import get_ext
from driver.models import Driver
from fileupload.models import PODFile, VehicleFile, OwnerFile, DriverFile, ChequeFile, InvoiceReceiptFile, WeighingSlip
from owner.models import Vehicle, Owner
from restapi.helper_api import DATE_FORMAT, DATETIME_FORMAT
from restapi.serializers.api import S3UploadSerializer
from restapi.serializers.authentication import UserSerializer
from restapi.serializers.driver import DriverSerializer
from restapi.serializers.owner import OwnerSerializer, VehicleSerializer
from restapi.serializers.sme import SmeSerializer
from restapi.serializers.team import LrNumberSerializer, ManualBookingSerializer, InvoiceSerializer
from sme.models import Sme
from team.models import LrNumber, ManualBooking, Invoice
class BasicPODFileSerializer(serializers.Serializer):
    """Read-only summary of a PODFile: its public S3 URL plus minimal LR/booking info."""
    id = serializers.IntegerField(label='ID', read_only=True)
    url = serializers.SerializerMethodField()
    lr = serializers.SerializerMethodField()
    booking = serializers.SerializerMethodField()

    def get_lr(self, instance):
        # {id, lr_number} when an LrNumber is linked, otherwise an empty dict.
        if isinstance(instance.lr_number, LrNumber):
            return {'id': instance.lr_number.id, 'lr_number': instance.lr_number.lr_number}
        return {}

    def get_booking(self, instance):
        # {id, booking_id} when a ManualBooking is linked, otherwise an empty dict.
        if isinstance(instance.booking, ManualBooking):
            return {'id': instance.booking.id, 'booking_id': instance.booking.booking_id}
        return {}

    def get_url(self, instance):
        # Public S3 URL of the upload, or None when no S3Upload is attached.
        if isinstance(instance, PODFile) and isinstance(instance.s3_upload, S3Upload):
            return instance.s3_upload.public_url()
        return None

    def create(self, validated_data):
        # Read-only serializer: creation is intentionally a no-op.
        pass

    def update(self, instance, validated_data):
        # Read-only serializer: updates are intentionally a no-op.
        pass
class PODFileSerializer(serializers.Serializer):
    """Read/write serializer for PODFile (proof-of-delivery uploads).

    Creating an instance also flags the linked ManualBooking's POD status as
    'unverified' (see create()).
    """
    id = serializers.IntegerField(label='ID', read_only=True)
    s3_thumb_url = serializers.URLField(allow_null=True, max_length=200, required=False,
                                        validators=[UniqueValidator(queryset=PODFile.objects.all())])
    serial = serializers.CharField(max_length=20)
    s3_url = serializers.URLField(required=False)
    verified = serializers.BooleanField(default=False)
    is_valid = serializers.BooleanField(default=False)
    # NOTE(review): input_formats normally expects a list of format strings;
    # DATETIME_FORMAT / DATE_FORMAT are passed bare here — confirm they are lists.
    verified_datetime = serializers.DateTimeField(
        allow_null=True, required=False, format=DATE_FORMAT, input_formats=DATETIME_FORMAT)
    created_on = serializers.DateTimeField(read_only=True, format=DATE_FORMAT, input_formats=DATE_FORMAT)
    updated_on = serializers.DateTimeField(read_only=True)
    deleted = serializers.BooleanField(required=False)
    deleted_on = serializers.DateTimeField(allow_null=True, required=False)
    created_by = serializers.SlugRelatedField(queryset=User.objects.all(), required=False, slug_field="username")
    changed_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")
    uploaded_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")
    verified_by = serializers.SlugRelatedField(allow_null=True, queryset=User.objects.all(), required=False,
                                               slug_field="username")
    lr_number = serializers.PrimaryKeyRelatedField(write_only=True, allow_null=True, queryset=LrNumber.objects.all(),
                                                   required=True)
    booking = serializers.PrimaryKeyRelatedField(write_only=True, queryset=ManualBooking.objects.all())
    s3_upload = serializers.PrimaryKeyRelatedField(queryset=S3Upload.objects.all(), write_only=True, required=False)
    s3_upload_url = serializers.SerializerMethodField()
    # upload_file = serializers.SerializerMethodField()
    lr_number_data = serializers.SerializerMethodField()
    booking_id = serializers.SerializerMethodField()

    def get_lr_number_data(self, instance):
        # Human-readable LR number, or None when no LrNumber is linked.
        if isinstance(instance.lr_number, LrNumber):
            return instance.lr_number.lr_number
        return None

    def get_booking_id(self, instance):
        # Human-readable booking id, or None when no ManualBooking is linked.
        if isinstance(instance.booking, ManualBooking):
            return instance.booking.booking_id
        return None

    class Meta:
        # One POD file per (lr_number, serial) pair.
        validators = [UniqueTogetherValidator(queryset=PODFile.objects.all(), fields=('lr_number', 'serial'))]

    def validate_created_by(self, value):
        # Reject changes to created_by on existing instances.
        if isinstance(self.instance, PODFile) and value:
            raise serializers.ValidationError("Created by is immutable")
        return value

    def validate_uploaded_by(self, value):
        # Reject changes to uploaded_by on existing instances.
        if isinstance(self.instance, PODFile) and value:
            raise serializers.ValidationError("Uploaded by is immutable")
        return value

    def get_s3_upload_url(self, instance):
        # Public S3 URL of the upload, or None when no S3Upload is attached.
        if isinstance(instance, PODFile) and isinstance(instance.s3_upload, S3Upload):
            return instance.s3_upload.public_url()
        return None

    def create(self, validated_data):
        instance = PODFile.objects.create(**validated_data)
        # Side effect: mark the booking's POD as freshly received/unverified.
        if isinstance(instance.booking, ManualBooking):
            ManualBooking.objects.filter(id=instance.booking.id).update(
                pod_status='unverified', pod_date=datetime.now())
        return instance

    def update(self, instance, validated_data):
        # QuerySet.update() bypasses model save()/signals; re-fetch for fresh state.
        PODFile.objects.filter(id=instance.id).update(**validated_data)
        return PODFile.objects.get(id=instance.id)
class WeighingSlipSerializer(serializers.Serializer):
    """Read/write serializer for WeighingSlip uploads.

    NOTE(review): this class looks copied from PODFileSerializer; see the
    lr_number comment below.
    """
    id = serializers.IntegerField(label='ID', read_only=True)
    s3_thumb_url = serializers.URLField(allow_null=True, max_length=200, required=False,
                                        validators=[UniqueValidator(queryset=WeighingSlip.objects.all())])
    serial = serializers.CharField(max_length=20)
    s3_url = serializers.URLField(required=False)
    verified = serializers.BooleanField(default=False)
    is_valid = serializers.BooleanField(default=False)
    # NOTE(review): input_formats normally expects a list of format strings;
    # DATETIME_FORMAT / DATE_FORMAT are passed bare here — confirm they are lists.
    verified_datetime = serializers.DateTimeField(
        allow_null=True, required=False, format=DATE_FORMAT, input_formats=DATETIME_FORMAT)
    created_on = serializers.DateTimeField(read_only=True, format=DATE_FORMAT, input_formats=DATE_FORMAT)
    updated_on = serializers.DateTimeField(read_only=True)
    deleted = serializers.BooleanField(required=False)
    deleted_on = serializers.DateTimeField(allow_null=True, required=False)
    created_by = serializers.SlugRelatedField(queryset=User.objects.all(), required=False, slug_field="username")
    changed_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")
    uploaded_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")
    verified_by = serializers.SlugRelatedField(allow_null=True, queryset=User.objects.all(), required=False,
                                               slug_field="username")
    booking = serializers.PrimaryKeyRelatedField(write_only=True, queryset=ManualBooking.objects.all())
    s3_upload = serializers.PrimaryKeyRelatedField(queryset=S3Upload.objects.all(), write_only=True, required=False)
    s3_upload_url = serializers.SerializerMethodField()
    # upload_file = serializers.SerializerMethodField()
    lr_number_data = serializers.SerializerMethodField()
    booking_id = serializers.SerializerMethodField()

    def get_lr_number_data(self, instance):
        # NOTE(review): unlike PODFile, no lr_number field is declared above and
        # the WeighingSlip model may not define one — this would raise
        # AttributeError if so; confirm against the model.
        if isinstance(instance.lr_number, LrNumber):
            return instance.lr_number.lr_number
        return None

    def get_booking_id(self, instance):
        # Human-readable booking id, or None when no ManualBooking is linked.
        if isinstance(instance.booking, ManualBooking):
            return instance.booking.booking_id
        return None

    def validate_created_by(self, value):
        # Reject changes to created_by on existing instances.
        if isinstance(self.instance, WeighingSlip) and value:
            raise serializers.ValidationError("Created by is immutable")
        return value

    def validate_uploaded_by(self, value):
        # Reject changes to uploaded_by on existing instances.
        if isinstance(self.instance, WeighingSlip) and value:
            raise serializers.ValidationError("Uploaded by is immutable")
        return value

    def get_s3_upload_url(self, instance):
        # Public S3 URL of the upload, or None when no S3Upload is attached.
        if isinstance(instance, WeighingSlip) and isinstance(instance.s3_upload, S3Upload):
            return instance.s3_upload.public_url()
        return None

    def create(self, validated_data):
        instance = WeighingSlip.objects.create(**validated_data)
        return instance

    def update(self, instance, validated_data):
        # QuerySet.update() bypasses model save()/signals; re-fetch for fresh state.
        WeighingSlip.objects.filter(id=instance.id).update(**validated_data)
        return WeighingSlip.objects.get(id=instance.id)
class VehicleFileSerializer(serializers.Serializer):
    """Read/write serializer for VehicleFile (vehicle document uploads)."""
    id = serializers.IntegerField(label='ID', read_only=True)
    # Closed set of vehicle document categories (PUC/fitness/registration/permit/insurance).
    document_category = serializers.ChoiceField(choices=(
        ('PUC', 'Puc Certificate'), ('FIT', 'Fitness Certificate'), ('REG', 'Registration Certificate'),
        ('PERM', 'Permission Certificate'), ('INS', 'Insurance Certificate')))
    s3_url = serializers.URLField(max_length=200, validators=[UniqueValidator(queryset=VehicleFile.objects.all())])
    s3_thumb_url = serializers.URLField(allow_null=True, max_length=200, required=False,
                                        validators=[UniqueValidator(queryset=VehicleFile.objects.all())])
    serial = serializers.CharField(max_length=20)
    verified = serializers.BooleanField()
    is_valid = serializers.BooleanField()
    created_on = serializers.DateTimeField(read_only=True)
    updated_on = serializers.DateTimeField(read_only=True)
    deleted = serializers.BooleanField(required=False)
    deleted_on = serializers.DateTimeField(allow_null=True, required=False)
    created_by = serializers.SlugRelatedField(queryset=User.objects.all(), required=False, slug_field="username")
    changed_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")
    uploaded_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")
    vehicle = serializers.PrimaryKeyRelatedField(queryset=Vehicle.objects.all(),required=False)
    s3_upload = serializers.PrimaryKeyRelatedField(queryset=S3Upload.objects.all())
    # class Meta:
    #     validators = [UniqueTogetherValidator(queryset=VehicleFile.objects.all(), fields=('vehicle', 'serial'))]

    def validate_created_by(self, value):
        # Reject changes to created_by on existing instances.
        if isinstance(self.instance, VehicleFile) and value:
            raise serializers.ValidationError("Created by is immutable")
        return value

    def validate_uploaded_by(self, value):
        # Reject changes to uploaded_by on existing instances.
        if isinstance(self.instance, VehicleFile) and value:
            raise serializers.ValidationError("Uploaded by is immutable")
        return value

    def to_representation(self, instance):
        # Expand PK-related fields into nested representations on output.
        self.fields["vehicle"] = VehicleSerializer(read_only=True)
        # NOTE(review): no "booking" field is declared on this serializer —
        # confirm this assignment has any effect.
        self.fields["booking"] = ManualBookingSerializer(read_only=True)
        self.fields["s3_upload"] = S3UploadSerializer(read_only=True)
        return super().to_representation(instance=instance)

    def create(self, validated_data):
        instance = VehicleFile.objects.create(**validated_data)
        return instance

    def update(self, instance, validated_data):
        # QuerySet.update() bypasses model save()/signals; re-fetch for fresh state.
        VehicleFile.objects.filter(id=instance.id).update(**validated_data)
        return VehicleFile.objects.get(id=instance.id)
class OwnerFileSerializer(serializers.Serializer):
    """Read/write serializer for OwnerFile (owner identity document uploads)."""
    id = serializers.IntegerField(label='ID', read_only=True)
    # Closed set of owner identity document categories.
    document_category = serializers.ChoiceField(choices=(
        ('PAN', 'PAN Card'), ('DL', 'Driving Licence'), ('EL', 'Election ID'), ('AC', 'Aadhar Card'),
        ('PT', 'Passport'),
        ('RC', 'Ration Card'), ('DEC', 'Declaration')))
    s3_url = serializers.URLField(max_length=200, validators=[UniqueValidator(queryset=OwnerFile.objects.all())])
    s3_thumb_url = serializers.URLField(allow_null=True, max_length=200, required=False,
                                        validators=[UniqueValidator(queryset=OwnerFile.objects.all())])
    serial = serializers.CharField(max_length=20, required=True)
    verified = serializers.BooleanField(required=False)
    is_valid = serializers.BooleanField()
    created_on = serializers.DateTimeField(read_only=True)
    updated_on = serializers.DateTimeField(read_only=True)
    deleted = serializers.BooleanField(required=False)
    deleted_on = serializers.DateTimeField(allow_null=True, required=False)
    created_by = serializers.SlugRelatedField(queryset=User.objects.all(), required=False, slug_field="username")
    changed_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")
    uploaded_by = serializers.SlugRelatedField(allow_null=True, queryset=User.objects.all(), required=False,
                                               slug_field="username")
    owner = serializers.PrimaryKeyRelatedField(queryset=Owner.objects.all(),allow_null=True, required=False)
    s3_upload = serializers.PrimaryKeyRelatedField(queryset=S3Upload.objects.all())
    # class Meta:
    #     validators = [UniqueTogetherValidator(queryset=OwnerFile.objects.all(), fields=('owner', 'serial'))]

    def validate_created_by(self, value):
        # Reject changes to created_by on existing instances.
        if isinstance(self.instance, OwnerFile) and value:
            raise serializers.ValidationError("Created by is immutable")
        return value

    def validate_uploaded_by(self, value):
        # Reject changes to uploaded_by on existing instances.
        if isinstance(self.instance, OwnerFile) and value:
            raise serializers.ValidationError("Uploaded by is immutable")
        return value

    def to_representation(self, instance):
        # Expand PK-related fields into nested representations on output.
        self.fields["owner"] = OwnerSerializer(read_only=True)
        self.fields["s3_upload"] = S3UploadSerializer(read_only=True)
        return super().to_representation(instance=instance)

    def create(self, validated_data):
        instance = OwnerFile.objects.create(**validated_data)
        return instance

    def update(self, instance, validated_data):
        # QuerySet.update() bypasses model save()/signals; re-fetch for fresh state.
        OwnerFile.objects.filter(id=instance.id).update(**validated_data)
        return OwnerFile.objects.get(id=instance.id)
class DriverFileSerializer(serializers.Serializer):
    """Read/write serializer for DriverFile (driver identity document uploads)."""
    id = serializers.IntegerField(label='ID', read_only=True)
    # Closed set of driver identity document categories; optional here.
    document_category = serializers.ChoiceField(allow_null=True, choices=(
        ('PAN', 'PAN Card'), ('DL', 'Driving Licence'), ('EL', 'Election ID'), ('AC', 'Aadhar Card'),
        ('PT', 'Passport'),
        ('RC', 'Ration Card')), required=False)
    s3_url = serializers.URLField(max_length=200, validators=[UniqueValidator(queryset=DriverFile.objects.all())])
    s3_thumb_url = serializers.URLField(allow_null=True, max_length=200, required=False,
                                        validators=[UniqueValidator(queryset=DriverFile.objects.all())])
    verified = serializers.BooleanField()
    is_valid = serializers.BooleanField()
    serial = serializers.CharField(max_length=20)
    created_on = serializers.DateTimeField(read_only=True)
    updated_on = serializers.DateTimeField(read_only=True)
    deleted = serializers.BooleanField(required=False)
    deleted_on = serializers.DateTimeField(allow_null=True, required=False)
    created_by = serializers.SlugRelatedField(queryset=User.objects.all(), required=False, slug_field="username")
    changed_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")
    uploaded_by = serializers.SlugRelatedField(allow_null=True, queryset=User.objects.all(), required=False,
                                               slug_field="username")
    driver = serializers.PrimaryKeyRelatedField(queryset=Driver.objects.all(), required=False)
    s3_upload = serializers.PrimaryKeyRelatedField(queryset=S3Upload.objects.all())
    # class Meta:
    #     validators = [UniqueTogetherValidator(queryset=DriverFile.objects.all(), fields=('driver', 'serial'))]

    def validate_created_by(self, value):
        # Reject changes to created_by on existing instances.
        if isinstance(self.instance, DriverFile) and value:
            raise serializers.ValidationError("Created by is immutable")
        return value

    def validate_uploaded_by(self, value):
        # Reject changes to uploaded_by on existing instances.
        if isinstance(self.instance, DriverFile) and value:
            raise serializers.ValidationError("Uploaded by is immutable")
        return value

    def to_representation(self, instance):
        # Expand PK-related fields into nested representations on output.
        self.fields["driver"] = DriverSerializer(read_only=True)
        self.fields["s3_upload"] = S3UploadSerializer(read_only=True)
        return super().to_representation(instance=instance)

    def create(self, validated_data):
        instance = DriverFile.objects.create(**validated_data)
        return instance

    def update(self, instance, validated_data):
        # QuerySet.update() bypasses model save()/signals; re-fetch for fresh state.
        DriverFile.objects.filter(id=instance.id).update(**validated_data)
        return DriverFile.objects.get(id=instance.id)
class ChequeFileSerializer(serializers.Serializer):
    """Read/write serializer for ChequeFile (customer cheque image uploads)."""
    id = serializers.IntegerField(label='ID', read_only=True)
    s3_url = serializers.URLField(allow_null=True, max_length=200, required=False,
                                  validators=[UniqueValidator(queryset=ChequeFile.objects.all())])
    resolved_datetime = serializers.DateTimeField(allow_null=True, required=False)
    customer_name = serializers.CharField(max_length=300)
    amount = serializers.IntegerField(max_value=50000000, min_value=0, required=False)
    # Indian cheque numbers are exactly 6 digits long.
    cheque_number = serializers.CharField(max_length=6, min_length=6)
    cheque_date = serializers.DateField(format=DATE_FORMAT, input_formats=[DATE_FORMAT, ISO_8601])
    remarks = serializers.CharField(allow_null=True, max_length=300, required=False)
    is_valid = serializers.BooleanField()
    resolved = serializers.BooleanField(required=False)
    serial = serializers.CharField(max_length=20, required=True)
    created_on = serializers.DateTimeField(read_only=True)
    updated_on = serializers.DateTimeField(read_only=True)
    deleted = serializers.BooleanField(required=False)
    deleted_on = serializers.DateTimeField(allow_null=True, required=False)
    created_by = serializers.SlugRelatedField(queryset=User.objects.all(), required=False, slug_field="username")
    changed_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")
    uploaded_by = serializers.SlugRelatedField(queryset=User.objects.all(), required=False, slug_field="username")
    resolved_by = serializers.SlugRelatedField(queryset=User.objects.all(), required=False, slug_field="username")
    customer = serializers.PrimaryKeyRelatedField(allow_null=True, queryset=Sme.objects.all(), required=False)
    s3_upload = serializers.PrimaryKeyRelatedField(queryset=S3Upload.objects.all())
    public_url = serializers.SerializerMethodField()

    class Meta:
        # One cheque file per (customer_name, serial) pair.
        validators = [UniqueTogetherValidator(queryset=ChequeFile.objects.all(), fields=('customer_name', 'serial'))]

    def validate_created_by(self, value):
        # Reject changes to created_by on existing instances.
        if isinstance(self.instance, ChequeFile) and value:
            raise serializers.ValidationError("Created by is immutable")
        return value

    def get_public_url(self, instance):
        # Public S3 URL of the upload, or None when no S3Upload is attached.
        if isinstance(instance, ChequeFile) and isinstance(instance.s3_upload, S3Upload):
            return instance.s3_upload.public_url()
        return None

    def validate_uploaded_by(self, value):
        # Reject changes to uploaded_by on existing instances.
        if isinstance(self.instance, ChequeFile) and value:
            raise serializers.ValidationError("Uploaded by is immutable")
        return value

    def to_representation(self, instance):
        # Expand s3_upload into a nested representation on output.
        # self.fields["customer"] = SmeSerializer(read_only=True)
        self.fields["s3_upload"] = S3UploadSerializer(read_only=True)
        return super().to_representation(instance=instance)

    def create(self, validated_data):
        instance = ChequeFile.objects.create(**validated_data)
        return instance

    def update(self, instance, validated_data):
        # QuerySet.update() bypasses model save()/signals; re-fetch for fresh state.
        ChequeFile.objects.filter(id=instance.id).update(**validated_data)
        return ChequeFile.objects.get(id=instance.id)
class InvoiceReceiptFileSerializer(serializers.Serializer):
    """Read/write serializer for InvoiceReceiptFile (invoice receipt uploads)."""
    id = serializers.IntegerField(label='ID', read_only=True)
    invoice_number = serializers.CharField(max_length=50, required=False)
    verified = serializers.BooleanField(default=False)
    is_valid = serializers.BooleanField(default=False)
    serial = serializers.CharField(max_length=20, required=False)
    invoice_sent_mode = serializers.CharField(allow_null=True, allow_blank=True, max_length=20, required=False)
    invoice_confirm_mode = serializers.CharField(allow_null=True, max_length=20, required=False)
    invoice_confirm_by_name = serializers.CharField(allow_null=True, max_length=20, required=False)
    invoice_confirm_by_phone = serializers.CharField(allow_null=True, allow_blank=True, max_length=20, required=False)
    created_on = serializers.DateTimeField(read_only=True)
    updated_on = serializers.DateTimeField(read_only=True)
    deleted = serializers.BooleanField(required=False)
    deleted_on = serializers.DateTimeField(allow_null=True, required=False)
    created_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")
    changed_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")
    uploaded_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")
    invoice_receipt = serializers.PrimaryKeyRelatedField(allow_null=True, queryset=Invoice.objects.all(),
                                                         required=False)
    s3_upload = serializers.PrimaryKeyRelatedField(allow_null=True, queryset=S3Upload.objects.all(), required=False)

    def validate_created_by(self, value):
        # Reject changes to created_by on existing instances.
        if isinstance(self.instance, InvoiceReceiptFile) and value:
            raise serializers.ValidationError("Created by is immutable")
        return value

    def validate_uploaded_by(self, value):
        # Reject changes to uploaded_by on existing instances.
        if isinstance(self.instance, InvoiceReceiptFile) and value:
            raise serializers.ValidationError("Uploaded by is immutable")
        return value

    def validate_deleted(self, attrs):
        # On updates with deleted not set, require the invoice_number to be unique.
        # NOTE(review): the filter can match the instance being updated itself,
        # which would always raise here — confirm an .exclude(id=...) is intended.
        if isinstance(self.instance, InvoiceReceiptFile) and not attrs:
            if InvoiceReceiptFile.objects.filter(invoice_number=self.instance.invoice_number):
                raise serializers.ValidationError("Invoice number must be unique")
        return attrs

    def to_representation(self, instance):
        # Expand s3_upload into a nested representation on output.
        # self.fields["invoice_receipt"] = InvoiceSerializer(read_only=True)
        self.fields["s3_upload"] = S3UploadSerializer(read_only=True)
        return super().to_representation(instance=instance)

    def create(self, validated_data):
        instance = InvoiceReceiptFile.objects.create(**validated_data)
        return instance

    def update(self, instance, validated_data):
        # QuerySet.update() bypasses model save()/signals; re-fetch for fresh state.
        InvoiceReceiptFile.objects.filter(id=instance.id).update(**validated_data)
        return InvoiceReceiptFile.objects.get(id=instance.id)
| 50.514286
| 119
| 0.730421
| 2,485
| 22,984
| 6.601207
| 0.078068
| 0.044379
| 0.02414
| 0.032187
| 0.824738
| 0.80139
| 0.763716
| 0.750061
| 0.722812
| 0.698183
| 0
| 0.0068
| 0.16816
| 22,984
| 454
| 120
| 50.625551
| 0.851195
| 0.025191
| 0
| 0.678873
| 0
| 0
| 0.044119
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.129577
| false
| 0.011268
| 0.053521
| 0
| 0.738028
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
9dd959fb807f1a582c1d36953f419fa877c902fa
| 6,666
|
py
|
Python
|
yolo3/models/yolo3_peleenet.py
|
holajoa/keras-YOLOv3-model-set
|
c15b8a2f48371c063f6482b25593dc70d5956323
|
[
"MIT"
] | 601
|
2019-08-24T10:14:52.000Z
|
2022-03-29T15:05:33.000Z
|
yolo3/models/yolo3_peleenet.py
|
holajoa/keras-YOLOv3-model-set
|
c15b8a2f48371c063f6482b25593dc70d5956323
|
[
"MIT"
] | 220
|
2019-10-04T18:57:59.000Z
|
2022-03-31T15:30:37.000Z
|
yolo3/models/yolo3_peleenet.py
|
holajoa/keras-YOLOv3-model-set
|
c15b8a2f48371c063f6482b25593dc70d5956323
|
[
"MIT"
] | 218
|
2019-10-31T03:32:11.000Z
|
2022-03-25T14:44:19.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""YOLO_v3 PeleeNet Model Defined in Keras."""
from tensorflow.keras.layers import UpSampling2D, Concatenate
from tensorflow.keras.models import Model
from common.backbones.peleenet import PeleeNet
from yolo3.models.layers import yolo3_predictions, yolo3lite_predictions, tiny_yolo3_predictions, tiny_yolo3lite_predictions
from yolo3.models.ultralite_layers import yolo3_ultralite_predictions, tiny_yolo3_ultralite_predictions
def yolo3_peleenet_body(inputs, num_anchors, num_classes):
    """Create YOLO_V3 PeleeNet model CNN body in Keras."""
    backbone = PeleeNet(input_tensor=inputs, weights='imagenet', include_top=False)
    print('backbone layers number: {}'.format(len(backbone.layers)))
    # For a 416x416x3 input the three detection-scale feature maps are:
    #   layer 365 (final map):        13 x 13 x 704
    #   layer 265 (end of stride 16): 26 x 26 x 512
    #   layer 133 (end of stride 8):  52 x 52 x 256
    # Layers are fetched by index because activation layer names can differ
    # between TF1.x and TF2.x.
    feature_indices = (365, 265, 133)
    feature_channels = (704, 512, 256)
    f1, f2, f3 = (backbone.layers[i].output for i in feature_indices)
    y1, y2, y3 = yolo3_predictions((f1, f2, f3), feature_channels, num_anchors, num_classes)
    return Model(inputs=inputs, outputs=[y1, y2, y3])
def yolo3lite_peleenet_body(inputs, num_anchors, num_classes):
    '''Create YOLO_v3 Lite PeleeNet model CNN body in keras.'''
    backbone = PeleeNet(input_tensor=inputs, weights='imagenet', include_top=False)
    print('backbone layers number: {}'.format(len(backbone.layers)))
    # For a 416x416x3 input the three detection-scale feature maps are:
    #   layer 365 (final map):        13 x 13 x 704
    #   layer 265 (end of stride 16): 26 x 26 x 512
    #   layer 133 (end of stride 8):  52 x 52 x 256
    # Layers are fetched by index because activation layer names can differ
    # between TF1.x and TF2.x.
    feature_indices = (365, 265, 133)
    feature_channels = (704, 512, 256)
    f1, f2, f3 = (backbone.layers[i].output for i in feature_indices)
    y1, y2, y3 = yolo3lite_predictions((f1, f2, f3), feature_channels, num_anchors, num_classes)
    return Model(inputs=inputs, outputs=[y1, y2, y3])
def tiny_yolo3_peleenet_body(inputs, num_anchors, num_classes):
    '''Create Tiny YOLO_v3 PeleeNet model CNN body in keras.

    The tiny head only consumes two feature maps, so (unlike the full-size
    variants) the stride-8 tap is not fetched at all.
    '''
    peleenet = PeleeNet(input_tensor=inputs, weights='imagenet', include_top=False)
    print('backbone layers number: {}'.format(len(peleenet.layers)))
    # Layer indices are used instead of names (names differ TF1.x vs TF2.x).
    # For a 416x416x3 input:
    # f1: 13 x 13 x 704 (layer 365, final feature map)
    f1 = peleenet.layers[365].output
    # f2: 26 x 26 x 512 (layer 265, end of stride 16)
    f2 = peleenet.layers[265].output
    f1_channel_num = 704
    f2_channel_num = 512
    y1, y2 = tiny_yolo3_predictions((f1, f2), (f1_channel_num, f2_channel_num), num_anchors, num_classes)
    return Model(inputs, [y1,y2])
def tiny_yolo3lite_peleenet_body(inputs, num_anchors, num_classes):
    '''Create Tiny YOLO_v3 Lite PeleeNet model CNN body in keras.

    The tiny head only consumes two feature maps, so (unlike the full-size
    variants) the stride-8 tap is not fetched at all.
    '''
    peleenet = PeleeNet(input_tensor=inputs, weights='imagenet', include_top=False)
    print('backbone layers number: {}'.format(len(peleenet.layers)))
    # Layer indices are used instead of names (names differ TF1.x vs TF2.x).
    # For a 416x416x3 input:
    # f1: 13 x 13 x 704 (layer 365, final feature map)
    f1 = peleenet.layers[365].output
    # f2: 26 x 26 x 512 (layer 265, end of stride 16)
    f2 = peleenet.layers[265].output
    f1_channel_num = 704
    f2_channel_num = 512
    y1, y2 = tiny_yolo3lite_predictions((f1, f2), (f1_channel_num, f2_channel_num), num_anchors, num_classes)
    return Model(inputs, [y1,y2])
def yolo3_ultralite_peleenet_body(inputs, num_anchors, num_classes):
    '''Create YOLO_v3 Ultra-Lite PeleeNet model CNN body in keras.'''
    backbone = PeleeNet(input_tensor=inputs, weights='imagenet', include_top=False)
    print('backbone layers number: {}'.format(len(backbone.layers)))
    # Layer indices are used instead of names (names differ TF1.x vs TF2.x).
    # For a 416x416x3 input:
    #   layer 365 -> 13x13x704, layer 265 -> 26x26x512, layer 133 -> 52x52x256
    tap_indices = (365, 265, 133)
    tap_channels = (704, 512, 256)
    f1, f2, f3 = (backbone.layers[idx].output for idx in tap_indices)
    y1, y2, y3 = yolo3_ultralite_predictions(
        (f1, f2, f3), tap_channels, num_anchors, num_classes)
    return Model(inputs=inputs, outputs=[y1, y2, y3])
def tiny_yolo3_ultralite_peleenet_body(inputs, num_anchors, num_classes):
    '''Create Tiny YOLO_v3 Ultra-Lite PeleeNet model CNN body in keras.'''
    backbone = PeleeNet(input_tensor=inputs, weights='imagenet', include_top=False)
    print('backbone layers number: {}'.format(len(backbone.layers)))
    # Layer indices are used instead of names (names differ TF1.x vs TF2.x).
    # For a 416x416x3 input:
    #   f1 = layer 365 output, 13x13x704 (final feature map)
    #   f2 = layer 265 output, 26x26x512 (end of stride 16)
    f1 = backbone.layers[365].output
    f2 = backbone.layers[265].output
    y1, y2 = tiny_yolo3_ultralite_predictions(
        (f1, f2), (704, 512), num_anchors, num_classes)
    return Model(inputs, [y1, y2])
| 38.091429
| 135
| 0.672667
| 1,083
| 6,666
| 3.975069
| 0.091413
| 0.069686
| 0.036237
| 0.055749
| 0.904297
| 0.900116
| 0.900116
| 0.900116
| 0.900116
| 0.894774
| 0
| 0.128355
| 0.228623
| 6,666
| 174
| 136
| 38.310345
| 0.708868
| 0.356286
| 0
| 0.746269
| 0
| 0
| 0.050658
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.089552
| false
| 0
| 0.074627
| 0
| 0.253731
| 0.089552
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9de66e26473b782ec2833d902fbb63de4b05d9dd
| 7,325
|
py
|
Python
|
baseline_classifiers.py
|
HKUST-KnowComp/multilingual_hate_speech
|
8c4c2268ed5f93a801ecd0aef31a12dd9d07d332
|
[
"MIT"
] | 51
|
2019-08-30T20:19:05.000Z
|
2022-03-21T13:28:49.000Z
|
baseline_classifiers.py
|
HKUST-KnowComp/multilingual_hate_speech
|
8c4c2268ed5f93a801ecd0aef31a12dd9d07d332
|
[
"MIT"
] | 1
|
2020-08-12T04:00:49.000Z
|
2020-08-21T03:16:39.000Z
|
baseline_classifiers.py
|
HKUST-KnowComp/multilingual_hate_speech
|
8c4c2268ed5f93a801ecd0aef31a12dd9d07d332
|
[
"MIT"
] | 6
|
2019-12-05T07:29:40.000Z
|
2021-09-02T02:52:14.000Z
|
import re
from collections import Counter
import os
import matplotlib
import numpy as np
import pandas as pd
from pandas import Series
from sklearn.pipeline import Pipeline
from sklearn.model_selection import train_test_split
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.naive_bayes import MultinomialNB
from sklearn.metrics import accuracy_score
from sklearn.linear_model import LogisticRegression
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import LabelBinarizer, LabelEncoder
from sklearn.metrics import classification_report
from annotated_data_processing import clean_text
from sklearn.preprocessing import MultiLabelBinarizer
from sklearn.metrics import accuracy_score
from sklearn.metrics import f1_score
from skmultilearn.problem_transform import ClassifierChain
from sklearn.dummy import DummyClassifier
from constants import LABELS
#logistic regression for multilabel tasks: annotator's sentiment and hostility type (tweet sentiment)
def lr_multilabel_classification(train_filename, dev_filename, test_filename, attribute):
    """Logistic-regression baseline (classifier chain) for a multilabel attribute.

    Labels in the `attribute` column are '_'-joined strings; they are split
    and binarized with a MultiLabelBinarizer fitted on the training split.
    Prints test accuracy and macro/micro F1.

    Args:
        train_filename, dev_filename, test_filename: CSV paths with a
            'tweet' column and the `attribute` column.
        attribute: name of the multilabel column to predict.
    """
    df_train = pd.read_csv(train_filename)
    df_dev = pd.read_csv(dev_filename)  # loaded for interface parity; dev is not evaluated here
    df_test = pd.read_csv(test_filename)
    mlb = MultiLabelBinarizer()
    X_train = df_train.tweet.apply(clean_text)
    y_train = mlb.fit_transform(df_train[attribute].apply(lambda x: x.split('_')))
    X_test = df_test.tweet.apply(clean_text)
    # BUGFIX: use transform() so test labels are encoded with the binarizer
    # fitted on train; the original re-fit on the test split, which can
    # reorder/alter the label columns and corrupt the evaluation.
    y_test = mlb.transform(df_test[attribute].apply(lambda x: x.split('_')))
    classifier = Pipeline([
        ('vectorizer', CountVectorizer()),
        ('tfidf', TfidfTransformer()),
        ('clf', ClassifierChain(LogisticRegression()))])
    classifier.fit(X_train, y_train)
    y_pred = classifier.predict(X_test)
    print('accuracy %s' % accuracy_score(y_pred, y_test))
    print('Test macro F1 score is %s' % f1_score(y_test, y_pred, average='macro'))
    print('Test micro F1 score is %s' % f1_score(y_test, y_pred, average='micro'))
#majority voting for multilabel tasks: annotator's sentiment and hostility type (tweet sentiment)
def majority_voting_multilabel_classification(train_filename, dev_filename, test_filename, attribute):
    """Majority-voting (DummyClassifier) baseline for a multilabel attribute.

    Labels in the `attribute` column are '_'-joined strings; they are split
    and binarized with a MultiLabelBinarizer fitted on the training split.
    Prints test accuracy and macro/micro F1.

    Args:
        train_filename, dev_filename, test_filename: CSV paths with a
            'tweet' column and the `attribute` column.
        attribute: name of the multilabel column to predict.
    """
    df_train = pd.read_csv(train_filename)
    df_dev = pd.read_csv(dev_filename)  # loaded for interface parity; dev is not evaluated here
    df_test = pd.read_csv(test_filename)
    mlb = MultiLabelBinarizer()
    X_train = df_train.tweet.apply(clean_text)
    y_train = mlb.fit_transform(df_train[attribute].apply(lambda x: x.split('_')))
    X_test = df_test.tweet.apply(clean_text)
    # BUGFIX: use transform() so test labels are encoded with the binarizer
    # fitted on train; the original re-fit on the test split, which can
    # reorder/alter the label columns and corrupt the evaluation.
    y_test = mlb.transform(df_test[attribute].apply(lambda x: x.split('_')))
    classifier = Pipeline([
        ('vectorizer', CountVectorizer()),
        ('tfidf', TfidfTransformer()),
        ('clf', ClassifierChain(DummyClassifier()))])
    classifier.fit(X_train, y_train)
    y_pred = classifier.predict(X_test)
    print('Accuracy %s' % accuracy_score(y_pred, y_test))
    print('Test macro F1 score is %s' % f1_score(y_test, y_pred, average='macro'))
    print('Test micro F1 score is %s' % f1_score(y_test, y_pred, average='micro'))
#majority voting for non multilabel tasks namely: target, group and directness
def majority_voting_non_multilabel_classification(train_filename, dev_filename, test_filename, attribute):
    """Majority-voting (DummyClassifier) baseline for a single-label attribute
    (target, group or directness).

    Prints test accuracy, a per-class classification report, and macro/micro F1.
    The dead CountVectorizer/TfidfTransformer pre-computations of the original
    were removed: the Pipeline already performs both steps.
    """
    my_labels = LABELS[attribute]
    df_train = pd.read_csv(train_filename)
    df_dev = pd.read_csv(dev_filename)  # loaded for interface parity; dev is not evaluated here
    df_test = pd.read_csv(test_filename)
    X_train = df_train.tweet.apply(clean_text)
    y_train = df_train[attribute]
    X_test = df_test.tweet.apply(clean_text)
    y_test = df_test[attribute]
    dummy = Pipeline([('vect', CountVectorizer()),
                      ('tfidf', TfidfTransformer()),
                      ('clf', DummyClassifier()),
                      ])
    dummy.fit(X_train, y_train)
    y_pred = dummy.predict(X_test)
    print('Accuracy %s' % accuracy_score(y_pred, y_test))
    # NOTE(review): my_labels is passed as both `labels` and `target_names`,
    # which assumes LABELS[attribute] holds the literal class values — verify.
    print(classification_report(y_test, y_pred,target_names=my_labels,labels=my_labels))
    print('Test macro F1 score is %s' % f1_score(y_test, y_pred, average='macro'))
    print('Test micro F1 score is %s' % f1_score(y_test, y_pred, average='micro'))
#logistic regression for non multilabel tasks namely: target, group and directness
def lr_non_multilabel_classification(train_filename, dev_filename, test_filename, attribute):
    """TF-IDF + logistic-regression baseline for a single-label attribute
    (target, group or directness). Prints test accuracy and macro/micro F1.

    The dead CountVectorizer/TfidfTransformer pre-computations of the original
    were removed: the Pipeline already performs both steps.
    """
    my_labels = LABELS[attribute]  # kept: also validates `attribute` is a known task
    df_train = pd.read_csv(train_filename)
    df_dev = pd.read_csv(dev_filename)  # loaded for interface parity; dev is not evaluated here
    df_test = pd.read_csv(test_filename)
    X_train = df_train.tweet.apply(clean_text)
    y_train = df_train[attribute]
    X_test = df_test.tweet.apply(clean_text)
    y_test = df_test[attribute]
    logreg = Pipeline([('vect', CountVectorizer()),
                       ('tfidf', TfidfTransformer()),
                       ('clf', LogisticRegression(n_jobs=1, C=1e5)),
                       ])
    logreg.fit(X_train, y_train)
    y_pred = logreg.predict(X_test)
    print('accuracy %s' % accuracy_score(y_pred, y_test))
    print('Test macro F1 score is %s' % f1_score(y_test, y_pred, average='macro'))
    print('Test micro F1 score is %s' % f1_score(y_test, y_pred, average='micro'))
def run_majority_voting(train_filename, dev_filename, test_filename, attribute):
    """Dispatch the majority-voting baseline for `attribute`.

    Multilabel attributes go to the ClassifierChain variant; single-label
    ones to the plain variant. Unknown attributes fall through (returns None).
    """
    if attribute in ('sentiment', 'annotator_sentiment'):
        # multilabel tasks
        return majority_voting_multilabel_classification(train_filename, dev_filename, test_filename, attribute)
    if attribute in ('target', 'group', 'directness'):
        # non multilabel tasks
        return majority_voting_non_multilabel_classification(train_filename, dev_filename, test_filename, attribute)
def run_logistic_regression(train_filename, dev_filename, test_filename, attribute):
    """Dispatch the logistic-regression baseline for `attribute`.

    Multilabel attributes go to the ClassifierChain variant; single-label
    ones to the plain variant. Unknown attributes fall through (returns None).
    """
    if attribute in ('sentiment', 'annotator_sentiment'):
        # multilabel tasks
        return lr_multilabel_classification(train_filename, dev_filename, test_filename, attribute)
    if attribute in ('target', 'group', 'directness'):
        # non multilabel tasks
        return lr_non_multilabel_classification(train_filename, dev_filename, test_filename, attribute)
| 46.360759
| 112
| 0.772833
| 1,038
| 7,325
| 5.139692
| 0.11079
| 0.026992
| 0.020244
| 0.042737
| 0.849672
| 0.849672
| 0.816307
| 0.792315
| 0.774321
| 0.774321
| 0
| 0.003122
| 0.125324
| 7,325
| 157
| 113
| 46.656051
| 0.829561
| 0.057338
| 0
| 0.705036
| 0
| 0
| 0.064956
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043165
| false
| 0
| 0.172662
| 0
| 0.244604
| 0.093525
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
06135d1b2cd96b993aeb0fcb7c4605bf49a40f95
| 256
|
py
|
Python
|
tests/tests_some_module/test_42.py
|
jerabaul29/example_python_package
|
4c9a47709e0317eaa00e5d78815da9568cbe51d0
|
[
"MIT"
] | null | null | null |
tests/tests_some_module/test_42.py
|
jerabaul29/example_python_package
|
4c9a47709e0317eaa00e5d78815da9568cbe51d0
|
[
"MIT"
] | 2
|
2021-05-05T20:51:44.000Z
|
2021-05-09T20:11:07.000Z
|
tests/tests_some_module/test_42.py
|
jerabaul29/example_python_package
|
4c9a47709e0317eaa00e5d78815da9568cbe51d0
|
[
"MIT"
] | 1
|
2021-02-01T08:37:28.000Z
|
2021-02-01T08:37:28.000Z
|
from example_package.some_module import some_module_42
def test_42_passing():
    """Example of a passing test: the module helper returns the constant 42."""
    result = some_module_42()
    assert result == 42
# def test_42_failing():
# """Example of a failing test."""
# assert(some_module_42() == 43)
| 19.692308
| 54
| 0.671875
| 37
| 256
| 4.324324
| 0.405405
| 0.25
| 0.225
| 0.1375
| 0.275
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067308
| 0.1875
| 256
| 12
| 55
| 21.333333
| 0.701923
| 0.476563
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
ae9c5a78c5d211e13bf575a21b751f000ad6ebbd
| 280,426
|
py
|
Python
|
genslice/genslice.py
|
Chenguang-Zhu/icsme20-artifact
|
13259683d712b499810774b3ed300dcab0dce989
|
[
"Apache-2.0"
] | null | null | null |
genslice/genslice.py
|
Chenguang-Zhu/icsme20-artifact
|
13259683d712b499810774b3ed300dcab0dce989
|
[
"Apache-2.0"
] | null | null | null |
genslice/genslice.py
|
Chenguang-Zhu/icsme20-artifact
|
13259683d712b499810774b3ed300dcab0dce989
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
import os
import sys
import re
import time
import json
import argparse
import shutil
import collections
import subprocess as sub
from goto import with_goto
# --- Path layout ------------------------------------------------------------
# All paths are derived from this script's location; the experiment tree is
# expected one level up, under ../file-level.
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__)) # Dir of this script
DOWNLOADS_DIR = SCRIPT_DIR + '/../_downloads'
CONFIGS_DIR = SCRIPT_DIR + '/../file-level/orig-configs'
SPLIT_CONFIGS_DIR = SCRIPT_DIR + '/../file-level/split-configs'
DEFINER_CONFIGS_DIR = SCRIPT_DIR + '/../file-level/definer-configs'
# Config dirs per tool pipeline. Directory names encode the tool order, e.g.
# "cslicer-split-definer" = run cslicer, then split, then definer.
SPLIT_CSLICER_CONFIGS_DIR = SCRIPT_DIR + '/../file-level/configs/split-cslicer'
SPLIT_DEFINER_CONFIGS_DIR = SCRIPT_DIR + '/../file-level/configs/split-definer'
CSLICER_SPLIT_CSLICER_CONFIGS_DIR = SCRIPT_DIR + '/../file-level/configs/cslicer-split-cslicer'
CSLICER_SPLIT_DEFINER_CONFIGS_DIR = SCRIPT_DIR + '/../file-level/configs/cslicer-split-definer'
DEFINER_SPLIT_CSLICER_CONFIGS_DIR = SCRIPT_DIR + '/../file-level/configs/definer-split-cslicer'
DEFINER_SPLIT_DEFINER_CONFIGS_DIR = SCRIPT_DIR + '/../file-level/configs/definer-split-definer'
CSLICER_DEFINER_SPLIT_CSLICER_CONFIGS_DIR = SCRIPT_DIR + '/../file-level/configs/cslicer-definer-split-cslicer'
CSLICER_DEFINER_SPLIT_DEFINER_CONFIGS_DIR = SCRIPT_DIR + '/../file-level/configs/cslicer-definer-split-definer'
CSLICER_STANDALONE_CONFIGS_DIR = SCRIPT_DIR + '/../file-level/configs/cslicer'
DEFINER_STANDALONE_CONFIGS_DIR = SCRIPT_DIR + '/../file-level/configs/definer'
CSLICER_DEFINER_CONFIGS_DIR = SCRIPT_DIR + '/../file-level/configs/cslicer-definer'
SPLIT_CSLICER_DEFINER_CONFIGS_DIR = SCRIPT_DIR + '/../file-level/configs/split-cslicer-definer'
SPLIT_CSLICER_DEFINER_DEFINER_CONFIGS_DIR = SCRIPT_DIR + '/../file-level/configs/split-cslicer-definer-definer'
CSLICER_SPLIT_DEFINER_DEFINER_CONFIGS_DIR = SCRIPT_DIR + '/../file-level/configs/cslicer-split-definer-definer'
DEFINER_SPLIT_CSLICER_DEFINER_CONFIGS_DIR = SCRIPT_DIR + '/../file-level/configs/definer-split-cslicer-definer'
DEFINER_CSLICER_SPLIT_DEFINER_CONFIGS_DIR = SCRIPT_DIR + '/../file-level/configs/definer-cslicer-split-definer'
SPLIT_DEFINER_CSLICER_DEFINER_CONFIGS_DIR = SCRIPT_DIR + '/../file-level/configs/split-definer-cslicer-definer'
CSLICER_DEFINER_DEFINER_CONFIGS_DIR = SCRIPT_DIR + '/../file-level/configs/cslicer-definer-definer'
DEFINER_CSLICER_DEFINER_CONFIGS_DIR = SCRIPT_DIR + '/../file-level/configs/definer-cslicer-definer'
SPLIT_DEFINER_DEFINER_CONFIGS_DIR = SCRIPT_DIR + '/../file-level/configs/split-definer-definer'
DEFINER_DEFINER_CONFIGS_DIR = SCRIPT_DIR + '/../file-level/configs/definer-definer'
CSLICER_DEFINER_SPLIT_CSLICER_DEFINER_CONFIGS_DIR = SCRIPT_DIR + \
    '/../file-level/configs/cslicer-definer-split-cslicer-definer'
CSLICER_SPLIT_DEFINER_CSLICER_DEFINER_CONFIGS_DIR = SCRIPT_DIR + \
    '/../file-level/configs/cslicer-split-definer-cslicer-definer'
# --- POM templates and coverage artifacts -----------------------------------
POMS_DIR = SCRIPT_DIR + '/../file-level/example-poms'
CSLICER_SPLIT_CSLICER_SECOND_PHASE_POM_DIR = SCRIPT_DIR + '/../file-level/second-phase-poms'
JACOCOS_DIR = SCRIPT_DIR + '/../file-level/jacoco-files'
# --- Per-pipeline output dirs (mirror the config dirs above) ----------------
CSLICER_ORIG_OUTPUT_DIR = SCRIPT_DIR + '/../file-level/cslicer-orig-output'
CSLICER_SPLIT_OUTPUT_DIR = SCRIPT_DIR + '/../file-level/cslicer-split-output'
OUTPUT_DIR = SCRIPT_DIR + '/../file-level/output'
SPLIT_CSLICER_OUTPUT_DIR = SCRIPT_DIR + '/../file-level/output/split-cslicer'
SPLIT_DEFINER_OUTPUT_DIR = SCRIPT_DIR + '/../file-level/output/split-definer'
CSLICER_SPLIT_CSLICER_OUTPUT_DIR = SCRIPT_DIR + '/../file-level/output/cslicer-split-cslicer'
CSLICER_SPLIT_DEFINER_OUTPUT_DIR = SCRIPT_DIR + '/../file-level/output/cslicer-split-definer'
DEFINER_SPLIT_CSLICER_OUTPUT_DIR = SCRIPT_DIR + '/../file-level/output/definer-split-cslicer'
DEFINER_SPLIT_DEFINER_OUTPUT_DIR = SCRIPT_DIR + '/../file-level/output/definer-split-definer'
CSLICER_DEFINER_SPLIT_CSLICER_OUTPUT_DIR = SCRIPT_DIR + \
    '/../file-level/output/cslicer-definer-split-cslicer'
CSLICER_DEFINER_SPLIT_DEFINER_OUTPUT_DIR = SCRIPT_DIR + \
    '/../file-level/output/cslicer-definer-split-definer'
CSLICER_STANDALONE_OUTPUT_DIR = SCRIPT_DIR + '/../file-level/output/cslicer'
DEFINER_STANDALONE_OUTPUT_DIR = SCRIPT_DIR + '/../file-level/output/definer'
CSLICER_DEFINER_OUTPUT_DIR = SCRIPT_DIR + '/../file-level/output/cslicer-definer'
SPLIT_CSLICER_DEFINER_OUTPUT_DIR = SCRIPT_DIR + '/../file-level/output/split-cslicer-definer'
SPLIT_CSLICER_DEFINER_DEFINER_OUTPUT_DIR = SCRIPT_DIR + '/../file-level/output/split-cslicer-definer-definer'
CSLICER_SPLIT_DEFINER_DEFINER_OUTPUT_DIR = SCRIPT_DIR + '/../file-level/output/cslicer-split-definer-definer'
DEFINER_SPLIT_CSLICER_DEFINER_OUTPUT_DIR = SCRIPT_DIR + '/../file-level/output/definer-split-cslicer-definer'
DEFINER_CSLICER_SPLIT_DEFINER_OUTPUT_DIR = SCRIPT_DIR + '/../file-level/output/definer-cslicer-split-definer'
SPLIT_DEFINER_CSLICER_DEFINER_OUTPUT_DIR = SCRIPT_DIR + '/../file-level/output/split-definer-cslicer-definer'
CSLICER_DEFINER_DEFINER_OUTPUT_DIR = SCRIPT_DIR + '/../file-level/output/cslicer-definer-definer'
DEFINER_CSLICER_DEFINER_OUTPUT_DIR = SCRIPT_DIR + '/../file-level/output/definer-cslicer-definer'
SPLIT_DEFINER_DEFINER_OUTPUT_DIR = SCRIPT_DIR + '/../file-level/output/split-definer-definer'
DEFINER_DEFINER_OUTPUT_DIR = SCRIPT_DIR + '/../file-level/output/definer-definer'
CSLICER_DEFINER_SPLIT_CSLICER_DEFINER_OUTPUT_DIR = SCRIPT_DIR + \
    '/../file-level/output/cslicer-definer-split-cslicer-definer'
CSLICER_SPLIT_DEFINER_CSLICER_DEFINER_OUTPUT_DIR = SCRIPT_DIR + \
    '/../file-level/output/cslicer-split-definer-cslicer-definer'
# For true minimal exp
DEFINER_WITH_MEMORY_STANDALONE_OUTPUT_DIR = SCRIPT_DIR + \
    '/../file-level/output/definer-with-memory'
# --- Scratch space, logs, caches and report outputs -------------------------
TEMP_LOGS_DIR = SCRIPT_DIR + '/../file-level/temp-logs'
TEMP_CONFIGS_DIR = SCRIPT_DIR + '/../file-level/temp-configs'
TEMP_FILES_DIR = SCRIPT_DIR + '/../file-level/temp-files'
REPOS_BACKUP_DIR = SCRIPT_DIR + '/../file-level/_repos'
SPLIT_LOGS_DIR = SCRIPT_DIR + '/../file-level/_split_logs'
VALIDATE_LOGS_DIR = SCRIPT_DIR + '/../file-level/_validate_logs'
CSLICER_JAR_PATH = SCRIPT_DIR + '/cslicer-1.0.0-jar-with-dependencies.jar'
# LaTeX tables generated for the paper
NUMBERS_TEX_PATH = SCRIPT_DIR + '/../file-level/results/tables/examples-numbers.tex'
TABLE_TEX_PATH = SCRIPT_DIR + '/../file-level/results/tables/examples-table.tex'
TIME_TABLE_TEX_PATH = SCRIPT_DIR + '/../file-level/results/tables/time-table.tex'
AST_LINES_TABLE_TEX_PATH = SCRIPT_DIR + '/../file-level/results/tables/ast-lines-table.tex'
TEST_CLASSES_BACKUP_DIR = SCRIPT_DIR + '/../file-level/_test_classes_backup'
TOUCH_SET_DIR = SCRIPT_DIR + '/../file-level/touchset'
CACHED_REPOS_DIR = SCRIPT_DIR + '/../file-level/cached-repos'
SUFFIX_SHARING_CACHE_DIR = SCRIPT_DIR + '/../file-level/suffix-cache'
ORIG_HISTORY_DIR = SCRIPT_DIR + '/../file-level/orig-history'
SPLIT_TEMP_FILE = '/tmp/split.tmp'
# Exp 1
# Subject issues (Apache JIRA keys) for the first experiment.
definer_optimal_exp_examples = ['COMPRESS-375', 'CONFIGURATION-626', 'CSV-159',
                                'FLUME-2628', 'IO-275', 'IO-288', 'PDFBOX-3069',
                                'PDFBOX-3307', 'PDFBOX-3418']
# Exp 2 & 3
# Full subject set; each ID must have a matching .properties config and pom.
examples = ['COMPRESS-327', 'COMPRESS-369', 'COMPRESS-373',
            'COMPRESS-374', 'COMPRESS-375',
            'CONFIGURATION-624', 'CONFIGURATION-626',
            'CSV-159', 'CSV-175', 'CSV-179', 'CSV-180',
            'FLUME-2628',
            'IO-173', 'IO-275', 'IO-288', 'IO-290', 'IO-305',
            'LANG-993', 'LANG-1006',
            'MNG-4904', 'MNG-4909', 'MNG-4910',
            'NET-436', 'NET-525', 'NET-527',
            'PDFBOX-3069', 'PDFBOX-3418', 'PDFBOX-3307']
def parseArgs(argv):
    '''
    Parse the args of the script.

    Flags come in two families: boolean pipeline switches (store_true) and
    "-one" variants that take a single example id as their value. Prints
    help and exits with status 1 when called with no arguments.
    '''
    parser = argparse.ArgumentParser()
    # Boolean switches: (flag, help text), registered in original order.
    switch_flags = [
        ('--clean-prefix-cache', 'Clean cached repos'),
        ('--clean-suffix-cache', 'Clean cached suffix'),
        ('--share-prefix', 'Enable prefix sharing'),
        ('--share-suffix', 'Enable suffix sharing'),
        ('--clean-touchset', 'Clean touch set'),
        ('--split-cslicer', 'Run split then cslicer'),
        ('--split-definer', 'Run split then definer'),
        ('--cslicer-split-cslicer', 'Run cslicer then split then cslicer'),
        ('--cslicer-split-definer', 'Run cslicer then split then definer'),
        ('--definer-split-cslicer', 'Run definer then split then cslicer'),
        ('--definer-split-definer', 'Run definer then split then definer'),
        ('--cslicer-definer-split-cslicer', 'Run cslicer then definer then split then cslicer'),
        ('--cslicer-definer-split-definer', 'Run cslicer then definer then split then definer'),
        ('--cslicer', 'Run cslicer standalone'),
        ('--definer', 'Run definer standalone'),
        ('--cslicer-definer', 'Run cslicer definer'),
        ('--split-cslicer-definer', 'Run split cslicer definer'),
        ('--split-cslicer-definer-definer', 'Run split cslicer definer definer'),
        ('--cslicer-split-definer-definer', 'Run cslicer split definer definer'),
        ('--definer-split-cslicer-definer', 'Run definer split cslicer definer'),
        ('--definer-cslicer-split-definer', 'Run definer cslicer split definer'),
        ('--split-definer-cslicer-definer', 'Run split definer cslicer definer'),
        ('--cslicer-definer-definer', 'Run cslicer definer definer'),
        ('--definer-cslicer-definer', 'Run definer cslicer definer'),
        ('--split-definer-definer', 'Run split definer definer'),
        ('--definer-definer', 'Run definer definer'),
        ('--cslicer-definer-split-cslicer-definer', 'Run cslicer definer split cslicer definer'),
        ('--cslicer-split-definer-cslicer-definer', 'Run cslicer split definer cslicer definer'),
        ('--definer-with-memory', 'Run memorized definer'),  # for true minimal exp
    ]
    for flag, help_text in switch_flags:
        parser.add_argument(flag, help=help_text, action='store_true', required=False)
    # "-one" variants: each takes one example id as its value.
    single_example_flags = [
        ('--split-cslicer-one', 'Run split then cslicer'),
        ('--split-definer-one', 'Run split then definer'),
        ('--cslicer-split-cslicer-one', 'Run cslicer then split then cslicer'),
        ('--cslicer-split-definer-one', 'Run cslicer then split then definer'),
        ('--definer-split-cslicer-one', 'Run definer then split then cslicer'),
        ('--definer-split-definer-one', 'Run definer then split then definer'),
        ('--cslicer-definer-split-cslicer-one', 'Run cslicer then definer then split then cslicer'),
        ('--cslicer-definer-split-definer-one', 'Run cslicer then definer then split then definer'),
        ('--cslicer-one', 'Run cslicer standalone'),
        ('--definer-one', 'Run definer standalone'),
        ('--cslicer-definer-one', 'Run cslicer definer'),
        ('--split-cslicer-definer-one', 'Run split cslicer definer'),
        ('--split-cslicer-definer-definer-one', 'Run split cslicer definer definer'),
        ('--cslicer-split-definer-definer-one', 'Run cslicer split definer definer'),
        ('--definer-split-cslicer-definer-one', 'Run definer split cslicer definer'),
        ('--definer-cslicer-split-definer-one', 'Run definer cslicer split definer'),
        ('--split-definer-cslicer-definer-one', 'Run split definer cslicer definer'),
        ('--cslicer-definer-definer-one', 'Run cslicer definer definer'),
        ('--definer-cslicer-definer-one', 'Run definer cslicer definer'),
        ('--split-definer-definer-one', 'Run split definer definer'),
        ('--definer-definer-one', 'Run definer definer'),
        ('--cslicer-definer-split-cslicer-definer-one', 'Run cslicer definer split cslicer definer'),
        ('--cslicer-split-definer-cslicer-definer-one', 'Run cslicer split definer cslicer definer'),
    ]
    for flag, help_text in single_example_flags:
        parser.add_argument(flag, help=help_text, required=False)
    parser.add_argument('--definer-with-memory-one', help='Run memorized definer', \
                        action='store_true', required=False) # for true minimal exp
    if len(argv) == 0:
        parser.print_help()
        exit(1)
    opts = parser.parse_args(argv)
    return opts
def searchFile(dir_root, file_name):
    """Return the path of the first file named `file_name` found under
    `dir_root` (walking top-down), or None when no such file exists."""
    for folder, _subdirs, names in os.walk(dir_root):
        if file_name in names:
            return folder + '/' + file_name
    return None
def replacePomSurefireVersions(example, repo_path, new_pom_file):
    '''
    update pom file to use a newer surefire version to support the "mvn test # +" format
    '''
    # PDFBOX is the only project whose main pom lives in a subdirectory.
    if example.startswith('PDFBOX'):
        pom_path = repo_path + '/pdfbox/pom.xml'
    else:
        # single module projects
        pom_path = repo_path + '/pom.xml'
    shutil.copyfile(new_pom_file, pom_path)
    # Multi-module projects additionally need ${argLine} inserted into every
    # submodule pom (test poms under src/test are skipped).
    if any(example.startswith(prefix) for prefix in ('MNG', 'CALCITE', 'FLUME')):
        for pom in findAllPomsInDir(repo_path):
            if '/src/test' not in pom:
                insertArgsInOnePom(pom)
def findAllPomsInDir(target_dir):
    """Collect the paths of every pom.xml found under `target_dir`."""
    return [
        folder + '/pom.xml'
        for folder, _subdirs, names in os.walk(target_dir)
        if 'pom.xml' in names
    ]
def insertArgsInOnePom(pom):
    """Append ${argLine} to the <argLine> of every maven-surefire-plugin
    block in `pom`, rewriting the file in place."""
    with open(pom, 'r') as handle:
        lines = handle.readlines()
    total = len(lines)
    for i in range(total):
        if '<artifactId>maven-surefire-plugin</artifactId>' not in lines[i]:
            continue
        # Find the end of this <plugin> block; fall back to the last line,
        # matching the original scan's terminal index when none is found.
        plugin_end = total - 1
        for pos in range(i, total):
            if '</plugin>' in lines[pos]:
                plugin_end = pos
                break
        for k in range(i, plugin_end):
            if '<argLine>' in lines[k]:
                lines[k] = lines[k].replace('</argLine>', ' ${argLine}</argLine>')
    with open(pom, 'w') as handle:
        handle.write(''.join(lines))
def extractInfoFromCSlicerConfigs(example):
    '''
    read start commit, end commit, repo, and test suite
    '''
    # Locate <example>.properties under the original cslicer config tree.
    config_file = searchFile(CONFIGS_DIR, example + '.properties')
    if config_file is None:
        print ('Cannot find config file!')
        exit(0)
    with open(config_file, 'r') as handle:
        lines = handle.readlines()
    # Pull the fields of interest out of the java-properties style lines.
    # NOTE(review): a key missing from the file leaves its variable unbound
    # and the return below raises — assumed well-formed configs.
    for line in lines:
        if line.startswith('startCommit'):
            start = line.strip().split()[-1]
        elif line.startswith('endCommit'):
            end = line.strip().split()[-1]
        elif line.startswith('repoPath'):
            repo_name = line.split('/')[-2]
        elif line.startswith('testScope'):
            test_suite = line.strip().split()[-1]
    repo_path = DOWNLOADS_DIR + '/' + repo_name
    return start, end, repo_name, test_suite, repo_path, lines, config_file
def extractInfoFromDefinerConfigs(example):
    """Read start/end commits, repo, build script path and test suite from
    the definer config for `example`."""
    # Locate <example>.properties under the definer config tree.
    config_file = searchFile(DEFINER_CONFIGS_DIR, example + '.properties')
    if config_file is None:
        print ('Cannot find config file!')
        exit(0)
    with open(config_file, 'r') as handle:
        lines = handle.readlines()
    # Pull the fields of interest out of the java-properties style lines.
    # NOTE(review): a key missing from the file leaves its variable unbound
    # and the return below raises — assumed well-formed configs.
    for line in lines:
        if line.startswith('startCommit'):
            start = line.strip().split()[-1]
        elif line.startswith('endCommit'):
            end = line.strip().split()[-1]
        elif line.startswith('repoPath'):
            repo_name = line.split('/')[-2]
        elif line.startswith('buildScriptPath'):
            build_script_path = line.strip().split()[-1]
        elif line.startswith('testScope'):
            test_suite = line.strip().split()[-1]
    repo_path = DOWNLOADS_DIR + '/' + repo_name
    return start, end, repo_name, build_script_path, test_suite, repo_path, lines, config_file
def updateCSlicerConfig(example, end, dst_dir):
    """Copy the cslicer config of `example` into `dst_dir` with its
    endCommit value replaced by `end`; return the new file's path."""
    config_file = searchFile(CONFIGS_DIR, example + '.properties')
    if config_file is None:
        print ('Cannot find config file!')
        exit(0)
    with open(config_file, 'r') as handle:
        original_lines = handle.readlines()
    rewritten = []
    for line in original_lines:
        if line.startswith('endCommit'):
            # Keep everything but the last token, then append the new commit.
            line = ' '.join(line.split()[:-1]) + ' ' + end + '\n'
        rewritten.append(line)
    updated_config_file = dst_dir + '/' + example + '.properties'
    with open(updated_config_file, 'w') as handle:
        handle.write(''.join(rewritten))
    return updated_config_file
def updateDefinerConfig(example, end, dst_dir):
    """Copy the definer config of `example` into `dst_dir` with its
    endCommit value replaced by `end`; return the new file's path."""
    config_file = searchFile(DEFINER_CONFIGS_DIR, example + '.properties')
    if config_file is None:
        print ('Cannot find config file!')
        exit(0)
    with open(config_file, 'r') as handle:
        original_lines = handle.readlines()
    rewritten = []
    for line in original_lines:
        if line.startswith('endCommit'):
            # Keep everything but the last token, then append the new commit.
            line = ' '.join(line.split()[:-1]) + ' ' + end + '\n'
        rewritten.append(line)
    updated_config_file = dst_dir + '/' + example + '.properties'
    with open(updated_config_file, 'w') as handle:
        handle.write(''.join(rewritten))
    return updated_config_file
def runTestsGenJacoco(example, end, repo_path, test_suite, poms_dir=POMS_DIR):
    """Check out *end* on a new 'orig' branch, build, run *test_suite*,
    and move the produced jacoco.exec into JACOCOS_DIR.

    NOTE(review): assumes no branch named 'orig' exists yet, and that the
    replacement pom enables jacoco output — confirm against the poms dir.
    """
    # run mvn test at the end commit, generate jacoco
    os.chdir(repo_path)
    sub.run('git checkout ' + end + ' -b orig', shell=True)
    # swap in the experiment pom (pins surefire/jacoco configuration)
    new_pom_file = searchFile(poms_dir, example + '.pom.xml')
    replacePomSurefireVersions(example, repo_path, new_pom_file)
    sub.run('mvn install -DskipTests', shell=True, \
            stdout=open(os.devnull, 'w'), stderr=open(os.devnull, 'w'))
    # multimodule
    submodule_path = getSubModulePathForAGivenProject(example)
    os.chdir(repo_path + submodule_path)
    sub.run('mvn test -Dtest=' + test_suite, shell=True)
    # save jacoco file for analysis
    target_path = getTargetPathForAGivenProject(example)
    jacoco_path = repo_path + target_path + '/jacoco.exec'
    shutil.move(jacoco_path, JACOCOS_DIR + '/' + example + '-jacoco.exec')
    os.chdir(repo_path)
def runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite, poms_dir=POMS_DIR):
    """Recreate a 'definerorig' branch at *end*, build, run *test_suite*,
    and back up target/test-classes for later reuse by Definer.

    NOTE(review): both 'trunk' and 'master' checkouts are attempted so the
    function works regardless of the repo's default branch name; the
    failing one is a harmless no-op.
    """
    os.chdir(repo_path)
    # delete definerorig branch if already exist
    sub.run('git checkout trunk', shell=True)
    sub.run('git checkout master', shell=True)
    print ('delete definerorig branch')
    sub.run('git branch -D definerorig', shell=True)
    # create definerorig branch
    sub.run('git checkout ' + end + ' -b definerorig', shell=True)
    new_pom_file = searchFile(poms_dir, example + '.pom.xml')
    replacePomSurefireVersions(example, repo_path, new_pom_file)
    sub.run('mvn install -DskipTests', shell=True, \
            stdout=open(os.devnull, 'w'), stderr=open(os.devnull, 'w'))
    # multi-module projects
    submodule_path = getSubModulePathForAGivenProject(example)
    os.chdir(repo_path + submodule_path)
    if example == 'PDFBOX-3262':
        preprocessPDFBOX3262(repo_path) # Only for PDFBOX-3262
    sub.run('mvn test -Dtest=' + test_suite, shell=True)
    os.chdir(repo_path)
    # copy target/test-classes to temp dir
    if os.path.isdir(TEST_CLASSES_BACKUP_DIR + '/test-classes'):
        shutil.rmtree(TEST_CLASSES_BACKUP_DIR + '/test-classes')
    # multi-module projects
    target_path = getTargetPathForAGivenProject(example)
    test_classes_path = repo_path + target_path + '/test-classes'
    shutil.copytree(test_classes_path, TEST_CLASSES_BACKUP_DIR + '/test-classes')
    # stash changes on pom
    sub.run('git stash', shell=True)
def preprocessPDFBOX3262(repo_path):
    """Un-comment the @Test annotation above testFlattenPDFBOX3262 in
    PDAcroFormFlattenTest.java, editing the file in place."""
    test_file = searchFile(repo_path, 'PDAcroFormFlattenTest.java')
    with open(test_file, 'r') as handle:
        content = handle.readlines()
    for idx, line in enumerate(content):
        if 'public void testFlattenPDFBOX3262() throws IOException' in line:
            # the annotation sits on the line right above the method
            content[idx - 1] = content[idx - 1].replace('// @Test', '@Test')
    with open(test_file, 'w') as handle:
        handle.write(''.join(content))
def splitCommitsByFile(example, repo_path, start, end, branch='filelevel'):
    """Split start..end into per-file commits on *branch* by running
    split_commits.py, logging to SPLIT_LOGS_DIR/<example>.logs and
    appending the measured splitting overhead to that log.

    Fixes: the log handle handed to sub.run was opened inline and never
    closed (fd leak); all file handles are now context-managed.
    """
    # measure the overhead of splitting
    split_commits_start_time = time.time()
    os.chdir(SCRIPT_DIR)
    print ('===> Splitting ...')
    log_path = SPLIT_LOGS_DIR + '/' + example + '.logs'
    with open(log_path, 'w') as log_fh:
        sub.run('python3 split_commits.py --repo ' + repo_path + \
                ' --start ' + start + \
                ' --end ' + end + \
                ' --branch ' + branch, shell=True, \
                stdout=log_fh, stderr=sub.STDOUT)
    split_commits_overhead = time.time() - split_commits_start_time
    # write the time into split logs
    with open(log_path, 'r', encoding = 'ISO-8859-1') as fr:
        split_lines = fr.readlines()
    split_lines.append(str(split_commits_overhead))
    with open(log_path, 'w') as fw:
        fw.write(''.join(split_lines))
def genSplittedConfigFile(example, repo_path, lines, configs_dir, branch='filelevel'):
    """Write a <example>.split.properties config whose endCommit is the
    tip of *branch* (the splitted history), and return its path.

    NOTE(review): mutates the passed-in *lines* list in place — callers
    that reuse the list afterwards see the rewritten endCommit.
    """
    # get the sha of splitted end commit, create config files
    os.chdir(repo_path)
    sub.run('git checkout ' + branch, shell=True)
    p = sub.Popen('git --no-pager log --oneline -1', shell=True, \
                  stdout=sub.PIPE, stderr=sub.PIPE)
    p.wait()
    # first token of the one-line log is the short sha of the branch tip
    file_level_end_commit = p.stdout.readlines()[0].decode("utf-8").split()[0]
    for i in range(len(lines)):
        if lines[i].startswith('endCommit'):
            lines[i] = ' '.join(lines[i].split()[:-1]) + ' ' + file_level_end_commit + '\n'
    split_config_file = configs_dir + '/' + example + '.split.properties'
    fw = open(split_config_file, 'w')
    fw.write(''.join(lines))
    fw.close()
    return split_config_file
def runCSlicerTool(cslicer_log, config_file, branch):
    """Check out *branch* and run the CSlicer jar in 'slicer' mode,
    capturing stdout+stderr into *cslicer_log*.

    Fix: close the log handle deterministically (it was leaked before).
    """
    sub.run('git checkout ' + branch, shell=True)
    with open(cslicer_log, 'w') as log_fh:
        sub.run('java -jar ' + CSLICER_JAR_PATH + ' -c ' + config_file + \
                ' -e slicer', shell=True, \
                stdout=log_fh, stderr=sub.STDOUT)
def runDefinerTool(definer_log, config_file, branch):
    """Check out *branch* and run Definer (CSlicer 'refiner' engine),
    writing all output to *definer_log*.

    The java process is killed by `timeout` after 4h (14400s).
    CONFIGURATION-466 additionally needs the -q flag.
    Fixes: the log handle was never closed (fd leak) and the sub.run
    result was bound to an unused variable.
    """
    sub.run('git checkout ' + branch, shell=True)
    extra_flag = ' -q' if 'CONFIGURATION-466' in config_file else ''
    with open(definer_log, 'w') as fw:
        sub.run('timeout 14400 java -jar ' + CSLICER_JAR_PATH + ' -c ' + config_file + \
                ' -e refiner -l noinv' + extra_flag, shell=True, stdout=fw, stderr=fw)
# For true minimal exp
def runDefinerToolWithMemory(definer_log, config_file, branch):
    """Run Definer in 'srr' mode (true-minimal experiment) on *branch*,
    with a 2h `timeout`, logging to *definer_log*.

    Fix: context-manage the log handle (it was leaked before).
    """
    sub.run('git checkout ' + branch, shell=True)
    with open(definer_log, 'w') as fw:
        sub.run('timeout 7200 java -jar ' + CSLICER_JAR_PATH + ' -c ' + config_file + \
                ' -e srr -l noinv', shell=True, stdout=fw, stderr=fw)
def extractHistorySliceFromCSlicerLog(cslicer_log):
    """Collect (sha, message) pairs from TEST:/COMP:/HUNK: lines of a
    CSlicer log and return them as two parallel lists, oldest first."""
    with open(cslicer_log) as fh:
        log_lines = fh.readlines()
    commit_list = []
    commit_msg_list = []
    for entry in log_lines:
        if entry.startswith(('TEST: ', 'COMP: ', 'HUNK: ')):
            # second whitespace token is the sha; text after ' : ' is the msg
            commit_list.append(entry.split()[1])
            commit_msg_list.append(entry.strip().split(' : ')[-1])
    # the log lists newest first; callers expect oldest first
    commit_list.reverse()
    commit_msg_list.reverse()
    return commit_list, commit_msg_list
def extractHistorySliceFromDefinerLog(definer_log):
    """Collect (sha, message) pairs from '[OUTPUT] H*:' lines of a
    Definer log, returned in log order as two parallel lists."""
    with open(definer_log) as fh:
        log_lines = fh.readlines()
    commit_list = []
    commit_msg_list = []
    for entry in log_lines:
        if entry.startswith('[OUTPUT] H*:'):
            # third whitespace token is the sha; text after ' : ' is the msg
            commit_list.append(entry.split()[2])
            commit_msg_list.append(entry.strip().split(' : ')[-1])
    return commit_list, commit_msg_list
def applyHistorySlice(repo_path, start, history_slice, commit_msg_list, branch_name):
    """Cherry-pick *history_slice* onto a new *branch_name* rooted at
    *start*, dropping src/test changes and auto-resolving src/main
    conflicts; return the short sha of the resulting branch tip.

    NOTE(review): the inner `for i in range(len(lines))` shadows the
    outer loop variable `i`; harmless here because the outer `for`
    reassigns it each iteration, but easy to trip over when editing.
    """
    cwd = os.getcwd()
    os.chdir(repo_path)
    sub.run('git checkout ' + start + ' -b ' + branch_name, shell=True)
    # print ('===> Applying History Slice ...')
    for i in range(len(history_slice)):
        commit = history_slice[i]
        commit_msg = commit_msg_list[i].replace('\"', '')
        # print ('Applying commit: ' + commit + ' ' + commit_msg)
        # drop changes on src/test
        sub.run('git cherry-pick -n ' + commit, shell=True, stdout=open(os.devnull, 'w'), \
                stderr=sub.STDOUT)
        p = sub.Popen('git status', shell=True, \
                      stdout=sub.PIPE, stderr=sub.PIPE)
        p.wait()
        lines = p.stdout.readlines()
        for i in range(len(lines)):
            lines[i] = lines[i].decode("utf-8")[:-1]
            # any staged/conflicted src/test entry is reverted and removed
            if ('modified: ' in lines[i] and 'src/test/' in lines[i]) or \
               ('both modified: ' in lines[i] and 'src/test/' in lines[i]) or \
               ('deleted by us: ' in lines[i] and 'src/test/' in lines[i]) or \
               ('added by us: ' in lines[i] and 'src/test/' in lines[i]) or \
               ('both deleted: ' in lines[i] and 'src/test/' in lines[i]) or \
               ('added by them: ' in lines[i] and 'src/test/' in lines[i]):
                file_path = lines[i].strip().split()[-1]
                sub.run('git reset ' + file_path, shell=True, stdout=open(os.devnull, 'w'), \
                        stderr=sub.STDOUT)
                sub.run('git checkout -- ' + file_path, shell=True, \
                        stdout=open(os.devnull, 'w'), stderr=sub.STDOUT)
                sub.run('git rm ' + file_path, shell=True, \
                        stdout=open(os.devnull, 'w'), stderr=sub.STDOUT)
            # conflicted src/main files are auto-resolved then staged
            if 'both modified: ' in lines[i] and 'src/main/' in lines[i]:
                file_path = lines[i].strip().split()[-1]
                resolveConflict(file_path)
                sub.run('git add ' + file_path, shell=True, stdout=open(os.devnull, 'w'), \
                        stderr=sub.STDOUT)
        # configuration: target dir not ignored
        if repo_path.endswith('commons-configuration'):
            os.system('rm -rf target')
            os.system('find -name test -type d | xargs rm -rf')
            os.system('git checkout .')
        # untracked files
        # p = sub.Popen('git ls-files --others --exclude-standard', shell=True, \
        #               stdout=sub.PIPE, stderr=sub.PIPE)
        # p.wait()
        # lines = p.stdout.readlines()
        # for i in range(len(lines)):
        #     lines[i] = lines[i].decode("utf-8")[:-1]
        #     if 'src/test' in lines[i]:
        #         os.remove(lines[i].strip())
        sub.run('git commit -m \"' + commit_msg + '\"', shell=True, \
                stdout=open(os.devnull, 'w'), stderr=sub.STDOUT)
    # get the new end commit
    p = sub.Popen('git --no-pager log --oneline -1', shell=True, \
                  stdout=sub.PIPE, stderr=sub.PIPE)
    p.wait()
    end_commit = p.stdout.readlines()[0].decode("utf-8").split()[0]
    os.chdir(cwd)
    return end_commit
def resolveConflict(file_path):
    """Auto-resolve git conflict markers in *file_path* in place,
    keeping the incoming side (below =======) of every conflict and
    dropping the HEAD side together with all marker lines."""
    with open(file_path, 'r') as fh:
        old_lines = fh.readlines()
    kept = ''
    idx = 0
    total = len(old_lines)
    while idx < total:
        stripped = old_lines[idx].strip()
        if '<<<<<<< HEAD' in stripped:
            # skip the HEAD side up to and including the ======= separator
            for probe in range(idx, total):
                if '=======' in old_lines[probe].strip():
                    idx = probe + 1
                    break
            continue
        if '>>>>>>>' in stripped:
            # drop the closing marker line
            idx += 1
            continue
        kept += old_lines[idx]
        idx += 1
    with open(file_path, 'w') as fh:
        fh.write(kept)
def countChangedLines(log_file, repo, tool):
    """Append 'Total Changed Lines: N' to *log_file*, where N is the
    insertions+deletions of the slice's commits minus edits under
    src/test/, computed by parsing `git log --stat` output in *repo*.

    NOTE(review): the shortstat parsing below assumes git's English
    summary line format ('X files changed, Y insertions(+), Z
    deletions(-)') — confirm the environment forces LC_ALL=C/English.
    """
    fr = open(log_file, 'r')
    lines = fr.readlines()
    fr.close()
    # pick the slice extractor matching the log flavor
    if tool == 'cslicer':
        commits, _ = extractHistorySliceFromCSlicerLog(log_file)
    elif tool == 'definer':
        commits, _ = extractHistorySliceFromDefinerLog(log_file)
    elif tool == 'split':
        commits, _ = extractHistorySliceFromSplitLog(log_file)
    total_num_of_insertions = 0
    total_num_of_deletions = 0
    total_num_of_test_edits = 0
    os.chdir(repo)
    for sha in commits:
        p = sub.Popen('git --no-pager log --stat=150 ' + sha + ' -1', shell=True, \
                      stdout=sub.PIPE, stderr=sub.PIPE)
        p.wait()
        commit_messages = p.stdout.readlines()
        #for msg in commit_messages:
        #    print (msg.decode("utf-8"))
        # last line is git's shortstat summary
        last_line = commit_messages[-1].decode("utf-8")[:-1]
        if 'insertion' in last_line:
            num_of_insertions = int(last_line.split('insertion')[0].split(',')[1].strip())
            print (num_of_insertions)
            total_num_of_insertions += num_of_insertions
        else:
            num_of_insertions = 0
        if 'deletion' in last_line:
            num_of_deletions = int(last_line.split('deletion')[0].split(',')[-1].strip())
            print (num_of_deletions)
            total_num_of_deletions += num_of_deletions
        else:
            num_of_deletions = 0
        # fix: we should ignore test edits
        num_of_test_edits = 0
        for i in range(len(commit_messages)):
            msg = commit_messages[i].decode('utf-8')
            # per-file stat rows look like ' path | N +++---'; skip binary rows
            if 'src/test/' in msg and ' | ' in msg:
                if msg.split('| ')[1].split()[0] == 'Bin':
                    continue
                num_of_test_edits += int(msg.split('| ')[1].split()[0])
            # if re.search(re.compile('\/.*Test\.java.*\|'), msg):
            #     if msg.split('| ')[1].split()[0] == 'Bin':
            #         continue
            #     num_of_test_edits += int(msg.split('| ')[1].split()[0])
            # elif re.search(re.compile('\/Test.*\.java.*\|'), msg):
            #     if msg.split('| ')[1].split()[0] == 'Bin':
            #         continue
            #     num_of_test_edits += int(msg.split('| ')[1].split()[0])
        total_num_of_test_edits += num_of_test_edits
    total_num_of_edits = total_num_of_insertions + total_num_of_deletions
    total_num_of_edits -= total_num_of_test_edits
    lines.append('Total Changed Lines: ' + str(total_num_of_edits) + '\n')
    fw = open(log_file, 'w')
    fw.write(''.join(lines))
    fw.close()
# CZ: old, upgrade to insertTimeDictinLog() in the future
def putTimeinLog(log_file, run_time):
    """Append a 'Total Run Time: <seconds>' line to *log_file*."""
    with open(log_file, 'r') as fh:
        contents = fh.readlines()
    contents.append('Total Run Time: ' + str(run_time) + '\n')
    with open(log_file, 'w') as fh:
        fh.write(''.join(contents))
def insertTimeDictinLog(log_file, time_dict):
    """Append one '<key>: <value>' line per entry of *time_dict* to
    *log_file*, preserving the dict's iteration order.

    Fix: the original did `lines += key + ...`, which extends the list
    with the string's individual characters (the joined output happened
    to be identical, but the list was corrupted); append whole lines.
    Also context-manage the file handles.
    """
    with open(log_file, 'r') as fr:
        lines = fr.readlines()
    for key in time_dict:
        lines.append(key + ': ' + str(time_dict[key]) + '\n')
    with open(log_file, 'w') as fw:
        fw.write(''.join(lines))
def backupRepoForDebugging(example, repo_path):
    """Snapshot *repo_path* to REPOS_BACKUP_DIR/<example>-repo,
    replacing any previous snapshot."""
    backup_dir = REPOS_BACKUP_DIR + '/' + example + '-repo'
    if os.path.isdir(backup_dir):
        shutil.rmtree(backup_dir)
    sub.run('cp -r ' + repo_path + ' ' + backup_dir, shell=True)
def cleanTempLogs():
    """Recreate the temp logs/configs/files directories as empty dirs."""
    for temp_dir in (TEMP_LOGS_DIR, TEMP_CONFIGS_DIR, TEMP_FILES_DIR):
        if os.path.isdir(temp_dir):
            shutil.rmtree(temp_dir)
        os.makedirs(temp_dir)
def cleanRepoAfterDefinerTimeout(repo_path):
    """Remove a stale git index lock in *repo_path* and stash leftover
    changes after a Definer timeout; restores the caller's cwd."""
    saved_cwd = os.getcwd()
    os.chdir(repo_path)
    print ('After definer timeout, clean')
    # a killed git process can leave the index lock behind
    lock_file = repo_path + '/.git/index.lock'
    if os.path.isfile(lock_file):
        os.remove(lock_file)
    sub.run('git stash', shell=True)
    os.chdir(saved_cwd)
def cleanTouchSet():
    """Delete any existing touch-set directory, then recreate it empty."""
    touch_dir = TOUCH_SET_DIR
    if os.path.isdir(touch_dir):
        print ('Clean touch set')
        shutil.rmtree(touch_dir)
    os.makedirs(touch_dir)
def isPrefixRepoCached(example, config, cached_repos_dir=CACHED_REPOS_DIR):
    """Return True iff cached_repos_dir/<example> contains an entry
    named *config*."""
    example_dir = cached_repos_dir + '/' + example
    if not os.path.isdir(example_dir):
        return False
    return config in os.listdir(example_dir)
def cachePrefixRepoIfNotAlreadyCached(example, config, repo_path, \
                                      cached_repos_dir=CACHED_REPOS_DIR):
    """Copy *repo_path* into the prefix cache as <example>/<config>;
    no-op when that cache entry already exists."""
    if isPrefixRepoCached(example, config):
        return
    example_dir = cached_repos_dir + '/' + example
    if not os.path.isdir(example_dir):
        os.makedirs(example_dir)
    # cp -r snapshots the working tree exactly as-is
    sub.run('cp -r ' + repo_path + ' ' + example_dir + '/' + config, shell=True)
def getEndSHAFrombranch(example, config, branch, cached_repos_dir=CACHED_REPOS_DIR):
    """Return the 7-character short SHA of the tip of *branch* in the
    cached repo <example>/<config>."""
    os.chdir(cached_repos_dir + '/' + example + '/' + config)
    proc = sub.Popen('git log ' + branch + ' -1', shell=True, stdout=sub.PIPE, stderr=sub.PIPE)
    proc.wait()
    # first output line is 'commit <full-sha>'; take token 2, truncate to 7
    head_line = proc.stdout.readlines()[0].decode("utf-8")[:-1]
    return head_line.split()[1][:7]
def isSuffixExist(suffix, example, suffix_sharing_cache_dir=SUFFIX_SHARING_CACHE_DIR):
    """Return True iff a suffix-sharing cache dir exists for this
    (example, suffix) pair."""
    return os.path.isdir(suffix_sharing_cache_dir + '/' + example + '/' + suffix)
def isCSlicerLog(log_file):
    """True iff *log_file* contains CSlicer's stats marker line."""
    with open(log_file, 'r') as fh:
        content = fh.readlines()
    return any(ln.startswith('[STATS] test.count : ') for ln in content)
def isDefinerLog(log_file):
    """True iff *log_file* contains Definer's hstar stats marker line."""
    with open(log_file, 'r') as fh:
        content = fh.readlines()
    return any(ln.startswith('[STATS] hstar.length : ') for ln in content)
def isSplitLog(log_file):
    """True iff *log_file* contains an '[AFTER SPLIT] ' marker line."""
    with open(log_file, 'r') as fh:
        content = fh.readlines()
    return any(ln.startswith('[AFTER SPLIT] ') for ln in content)
def isCommitLevel(log_file):
    """Heuristically decide whether *log_file* describes a commit-level
    slice (True) rather than a file-level / split one (False).

    File-level entries are recognized by a '[...]'-wrapped payload whose
    second-to-last token ends with ']'; any '[AFTER SPLIT]' line also
    marks the log as file-level.
    """
    with open(log_file, 'r') as fh:
        content = fh.readlines()
    for ln in content:
        if ln.startswith('TEST: ') or ln.startswith('[OUTPUT] H*: '):
            payload = ln.split(' : ')[1]
            tokens = payload.split()
            if len(tokens) == 1:
                continue
            if payload.startswith('[') and tokens[-2].endswith(']'):
                return False
        if ln.startswith('[AFTER SPLIT] '):
            return False
    return True
def isFileLevel(log_file):
    """Heuristically decide whether *log_file* describes a file-level
    slice (True); the mirror image of isCommitLevel.

    A '[...]'-wrapped payload whose second-to-last token ends with ']'
    or any '[AFTER SPLIT]' line marks the log as file-level.
    """
    with open(log_file, 'r') as fh:
        content = fh.readlines()
    for ln in content:
        if ln.startswith('TEST: ') or ln.startswith('[OUTPUT] H*: '):
            payload = ln.split(' : ')[1]
            tokens = payload.split()
            if len(tokens) == 1:
                continue
            if payload.startswith('[') and tokens[-2].endswith(']'):
                return True
        if ln.startswith('[AFTER SPLIT] '):
            return True
    return False
# May change to hash implementation later
def extractSliceFromCommitLevelLog(log_file):
    """Return the slice's commit messages (quotes stripped) from a
    commit-level log, auto-detecting the log flavor."""
    if isCSlicerLog(log_file):
        _, cmt_msgs = extractHistorySliceFromCSlicerLog(log_file)
    elif isDefinerLog(log_file):
        _, cmt_msgs = extractHistorySliceFromDefinerLog(log_file)
    else: # orig hist
        _, cmt_msgs = extractHistorySliceFromOrigHistory(log_file)
    return [m.replace('\"', '') for m in cmt_msgs]
# May change to hash implementation later
def extractSliceSHAsAndMsgsFromCommitLevelLog(example, log_file):
    """Return (shas, messages) from a commit-level log, auto-detecting
    the log flavor; quotes are stripped from every message."""
    if isCSlicerLog(log_file):
        shas, cmt_msgs = extractHistorySliceFromCSlicerLog(log_file)
    elif isDefinerLog(log_file):
        shas, cmt_msgs = extractHistorySliceFromDefinerLog(log_file)
    else: # orig hist
        shas, cmt_msgs = extractHistorySliceFromOrigHistory(log_file)
    cleaned_msgs = [m.replace('\"', '') for m in cmt_msgs]
    return shas, cleaned_msgs
# May change to hash implementation later
def extractSliceFromFileLevelLog(log_file):
    """Return the commit messages of a file-level slice, with the
    leading sha token dropped, the right-most ']' removed, and quotes
    stripped from each message.

    NOTE(review): when the log matches none of the three known formats,
    cmt_msgs is never bound and this raises NameError — confirm callers
    only pass recognized logs.
    """
    if isCSlicerLog(log_file):
        cmt_msgs = extractHistorySliceFromCSlicerLog(log_file)[1]
    elif isDefinerLog(log_file):
        cmt_msgs = extractHistorySliceFromDefinerLog(log_file)[1]
    elif isSplitLog(log_file):
        cmt_msgs = extractHistorySliceFromSplitLog(log_file)[1]
    for i in range(len(cmt_msgs)):
        msg = cmt_msgs[i]
        # drop the first token (sha) and delete the right-most ']' char
        last_bracket_idx = ' '.join(msg.split()[1:]).rfind(']')
        msg_without_sha = (' '.join(msg.split()[1:]))[:last_bracket_idx] + \
                          (' '.join(msg.split()[1:]))[last_bracket_idx+1:]
        msg_without_sha = msg_without_sha.replace('\"', '')
        cmt_msgs[i] = msg_without_sha
    return cmt_msgs
# May change to hash implementation later
def extractSliceSHAsAndMsgsFromFileLevelLog(example, log_file):
    """Return (shas, messages) from a file-level log; each message has
    its leading sha token dropped, its right-most ']' removed, and
    quotes stripped.

    Bug fix: the original called `files.append(f)` on a never-defined
    name `files` (NameError on the first message); the collected file
    names were never used or returned, so that dead code is removed.
    """
    if isCSlicerLog(log_file):
        shas, cmt_msgs = extractHistorySliceFromCSlicerLog(log_file)
    elif isDefinerLog(log_file):
        shas, cmt_msgs = extractHistorySliceFromDefinerLog(log_file)
    elif isSplitLog(log_file):
        shas, cmt_msgs = extractHistorySliceFromSplitLog(log_file)
    for i in range(len(cmt_msgs)):
        msg = cmt_msgs[i]
        # drop the first token (sha), then delete the right-most ']' char
        rest = ' '.join(msg.split()[1:])
        last_bracket_idx = rest.rfind(']')
        msg_without_sha = rest[:last_bracket_idx] + rest[last_bracket_idx+1:]
        cmt_msgs[i] = msg_without_sha.replace('\"', '')
    return shas, cmt_msgs
def searchSHAFromOrigHistUsingCmtMsgs(example, cmt_msg, orig_history_dir=ORIG_HISTORY_DIR):
    """Return all SHAs in <example>.hist whose message contains
    *cmt_msg*, compared whitespace-insensitively and ignoring the
    literal `ZipArchiveEntry` backtick token."""
    print (example, cmt_msg)
    with open(orig_history_dir + '/' + example + '.hist', 'r') as fh:
        hist_lines = fh.readlines()
    needle = cmt_msg.replace(' ', '').replace('`ZipArchiveEntry`', '')
    candidate_shas = []
    for hist_line in hist_lines:
        haystack = hist_line.replace('\"', '').replace(' ', '').replace('`ZipArchiveEntry`', '')
        if needle in haystack:
            candidate_shas.append(hist_line.split()[0])
    return candidate_shas
def getOriginalHistory(start, end, repo_path): # include end, exclude start
    """Return the one-line 'sha message' entries of start..end in
    *repo_path*, newest first (git log order).

    NOTE(review): the per-commit inner `git log --oneline <sha> -1` call
    re-fetches a line the outer log already produced — presumably kept
    for robustness; confirm before simplifying.
    """
    os.chdir(repo_path)
    p = sub.Popen('git --no-pager log ' + start + '..' + end + ' --oneline', shell=True, \
                  stdout=sub.PIPE, stderr=sub.PIPE)
    p.wait()
    commits = p.stdout.readlines()
    orig_history = []
    for commit in commits:
        # first token of the oneline entry is the short sha
        sha = commit.decode("utf-8")[:-1].strip().split(' ')[0]
        #print (sha)
        p = sub.Popen('git --no-pager log --oneline ' + sha + ' -1', shell=True, \
                      stdout=sub.PIPE, stderr=sub.PIPE)
        p.wait()
        commit_messages = p.stdout.readlines()
        msg = ''
        for msg_line in commit_messages:
            msg_line = msg_line.decode("utf-8")[:-1]
            msg += msg_line
        orig_history.append(msg)
    return orig_history
def isStateMatch(current_log, suffix, example, \
                 start=None, end=None, repo_path=None, \
                 suffix_sharing_cache_dir=SUFFIX_SHARING_CACHE_DIR):
    """Check whether the current slice state matches any cached suffix
    state for (example, suffix).

    Returns (True, saving_config) on a match, else (False, None).
    When *current_log* is None the current state is taken to be the
    original start..end history of *repo_path* instead of a tool log.
    Slices are only compared at the same granularity (commit vs file).
    """
    cached_configs = os.listdir(suffix_sharing_cache_dir + '/' + example + '/' + suffix)
    for config in cached_configs:
        cached_log = suffix_sharing_cache_dir + '/' + example + '/' + suffix + '/' + \
                config + '/states.log'
        # see if entire config can be saved
        if current_log == None:
            # strip the leading sha token and quotes from each history entry
            current_slice = [' '.join(cmt.split()[1:]).replace('\"', '') for cmt in \
                    getOriginalHistory(start, end, repo_path)]
            if isCommitLevel(cached_log):
                cached_slice = extractSliceFromCommitLevelLog(cached_log)
            elif isFileLevel(cached_log):
                cached_slice = extractSliceFromFileLevelLog(cached_log)
            if current_slice == cached_slice:
                return True, config.replace('savedby-', '')
            else:
                # NOTE(review): returns after checking only the FIRST cached
                # config when current_log is None — confirm this is intended
                return False, None
        # mixed granularities are incomparable; skip
        if isCommitLevel(current_log) and isFileLevel(cached_log):
            continue
        if isFileLevel(current_log) and isCommitLevel(cached_log):
            continue
        if isCommitLevel(current_log) and isCommitLevel(cached_log):
            current_slice = extractSliceFromCommitLevelLog(current_log)
            cached_slice = extractSliceFromCommitLevelLog(cached_log)
            if current_slice == cached_slice:
                return True, config.replace('savedby-', '')
            else:
                continue
        if isFileLevel(current_log) and isFileLevel(cached_log):
            current_slice = extractSliceFromFileLevelLog(current_log)
            cached_slice = extractSliceFromFileLevelLog(cached_log)
            if current_slice == cached_slice:
                return True, config.replace('savedby-', '')
            else:
                continue
    return False, None
def copyTheSliceFromOneConfigLogToFinalLog(config, example, dest_log, output_dir=OUTPUT_DIR):
    """Copy the slice section of the log produced by *config* (the run
    that saved the cache) into *dest_log*, prefixed with a
    'COPIED FROM:' banner; per-config exec-time lines are skipped.
    """
    # find out saved by which config, then copy the slice and time of that config.
    config_which_saving_cache = config
    source_log = output_dir + '/' + config_which_saving_cache + '/' + example + '.log'
    slice_lines = ''
    slice_lines += 'COPIED FROM: ' + config_which_saving_cache.replace('savedby-', '') + '\n'
    fr = open(source_log)
    lines = fr.readlines()
    fr.close()
    if isCSlicerLog(source_log):
        # CSlicer slices start at the '[OUTPUT] Results:' banner
        for i in range(len(lines)):
            if lines[i].startswith('[OUTPUT] Results:'):
                for j in range(i, len(lines)):
                    if ' Exec Time]: ' in lines[j]: # do not copy exec time
                        continue
                    slice_lines += lines[j]
                break
    elif isDefinerLog(source_log):
        # Definer slices start one line before the first '[OUTPUT] H*: '
        for i in range(len(lines)):
            if lines[i].startswith('[OUTPUT] H*: '):
                for j in range(i-1, len(lines)):
                    if ' Exec Time]: ' in lines[j]: # do not copy exec time
                        continue
                    slice_lines += lines[j]
                break
    fw = open(dest_log, 'w')
    fw.write(slice_lines)
    fw.close()
def genSplitLogFile(example, config, start, repo_path, branch, \
                    split_temp_file=SPLIT_TEMP_FILE, output_dir=OUTPUT_DIR):
    """Write the splitted history (start..tip-of-*branch*) as
    '[AFTER SPLIT] : <oneline>' entries into
    output_dir/<config>/<example>.log.split and return that path.
    Restores the caller's cwd.
    """
    cwd = os.getcwd()
    os.chdir(repo_path)
    p = sub.Popen('git --no-pager log ' + branch + ' --oneline -1', shell=True, \
                  stdout=sub.PIPE, stderr=sub.PIPE)
    p.wait()
    # tip of the split branch becomes the new end commit
    file_level_end_commit = p.stdout.readlines()[0].decode("utf-8").split()[0]
    end = file_level_end_commit
    sub.run('git --no-pager log ' + start + '..' + end + ' --oneline', shell=True, \
            stdout=open(split_temp_file, 'w'), stderr=sub.STDOUT)
    fr = open(split_temp_file, 'r')
    commits = fr.readlines()
    fr.close()
    for i in range(len(commits)):
        cmt = commits[i]
        commits[i] = '[AFTER SPLIT] : ' + cmt
    # write the splitted history to file
    split_log = output_dir + '/' + config + '/' + example + '.log.split'
    fw = open(split_log, 'w')
    fw.write(''.join(commits))
    fw.close()
    os.chdir(cwd)
    return split_log
def extractHistorySliceFromSplitLog(log_file):
    """Return (shas, msgs) from '[AFTER SPLIT] : <sha> <msg...>' lines;
    note only the FIRST message token is kept per entry."""
    with open(log_file, 'r') as fh:
        content = fh.readlines()
    shas = []
    msgs = []
    for ln in content:
        if ln.startswith('[AFTER SPLIT] : '):
            tokens = ln.split(' : ')[1].split()
            shas.append(tokens[0])
            msgs.append(tokens[1])
    return shas, msgs
def extractHistorySliceFromOrigHistory(log_file):
    """Return (shas, msgs) for every 'sha message' line of an original
    history file, skipping the 'CACHED BY:' banner."""
    with open(log_file, 'r') as fh:
        content = fh.readlines()
    shas = []
    msgs = []
    for ln in content:
        if ln.startswith('CACHED BY:'):
            continue
        tokens = ln.split()
        shas.append(tokens[0])
        msgs.append(' '.join(tokens[1:]))
    return shas, msgs
def cacheSuffixIfNotAlreadyCached(example, config, suffix, log_file, \
                                  suffix_sharing_cache_dir=SUFFIX_SHARING_CACHE_DIR):
    """Store *log_file* as the cached state for (example, suffix),
    tagged 'savedby-<config>'; no-op when already cached."""
    suffix_dir = suffix_sharing_cache_dir + '/' + example + '/' + suffix + '/' + 'savedby-' + \
            config
    if os.path.isdir(suffix_dir):
        return
    os.makedirs(suffix_dir)
    with open(log_file, 'r') as fh:
        cached_lines = fh.readlines()
    # banner records which config produced this cache entry
    cached_lines.insert(0, 'CACHED BY: ' + config + '\n')
    with open(suffix_dir + '/states.log', 'w') as fh:
        fh.write(''.join(cached_lines))
def getSubModulePathForAGivenProject(example):
    """Map an example id to the Maven submodule hosting its tests.

    Returns '' for single-module projects; any PDFBOX-* example maps
    to '/pdfbox'.
    """
    submodule_by_example = {
        'CALCITE-627': '/core', 'CALCITE-758': '/core', 'CALCITE-811': '/core',
        'CALCITE-803': '/core', 'CALCITE-991': '/core', 'CALCITE-1288': '/core',
        'CALCITE-1309': '/core',
        'CALCITE-655': '/avatica-server', 'CALCITE-718': '/avatica-server',
        'CALCITE-767': '/avatica',
        'MNG-4904': '/maven-core', 'MNG-4910': '/maven-core',
        'MNG-5530': '/maven-core', 'MNG-5549': '/maven-core',
        'MNG-4909': '/maven-model-builder',
        'FLUME-2052': '/flume-ng-core', 'FLUME-2056': '/flume-ng-core',
        'FLUME-2130': '/flume-ng-core', 'FLUME-2628': '/flume-ng-core',
        'FLUME-2982': '/flume-ng-core',
        'FLUME-2206': '/flume-ng-sinks/flume-ng-elasticsearch-sink',
        'FLUME-2498': '/flume-ng-sources/flume-taildir-source',
        'FLUME-2955': '/flume-ng-sources/flume-taildir-source',
        'FLUME-1710': '/flume-ng-sdk',
    }
    if example in submodule_by_example:
        return submodule_by_example[example]
    if example.startswith('PDFBOX'):
        return '/pdfbox'
    return '' # single-module project
def getTargetPathForAGivenProject(example):
    """Return the repo-relative Maven target directory for *example*'s
    submodule."""
    return getSubModulePathForAGivenProject(example) + '/target'
def cacheTargetDirForCSlicer2(example, repo_path, cached_repos_dir=CACHED_REPOS_DIR):
    """Cache the project's Maven target dir under <example>/target,
    replacing any previously cached copy."""
    target_path = getTargetPathForAGivenProject(example)
    cached_target = cached_repos_dir + '/' + example + '/target'
    if isPrefixRepoCached(example, 'target'):
        shutil.rmtree(cached_target)
    shutil.copytree(repo_path + target_path, cached_target)
def copyTargetDirBackForCSlicer2(example, repo_path, cached_repos_dir=CACHED_REPOS_DIR):
    """Restore the cached Maven target dir into the working repo,
    replacing the repo's current target dir if present."""
    target_path = getTargetPathForAGivenProject(example)
    repo_target = repo_path + target_path
    if os.path.isdir(repo_target):
        shutil.rmtree(repo_target)
    shutil.copytree(cached_repos_dir + '/' + example + '/target', repo_target)
def runCSlicerStandalone(example):
    """Run the standalone CSlicer pipeline for *example*: refresh the
    repo from its '-cache' copy, generate jacoco at the end commit, run
    CSlicer, then append run time and changed-line stats to the log."""
    print ('Starting Example :' + example)
    start_time = time.time()
    # extract info from cslicer orig config file
    start, end, repo_name, test_suite, repo_path, lines, config_file = \
            extractInfoFromCSlicerConfigs(example)
    if os.path.isdir(repo_path):
        print ('remove old repo')
        shutil.rmtree(repo_path)
    # fresh working copy from the pristine '-cache' clone
    shutil.copytree(repo_path + '-cache', repo_path)
    # run tests at end commit, generate jacoco files
    runTestsGenJacoco(example, end, repo_path, test_suite)
    cslicer_orig_log = CSLICER_STANDALONE_OUTPUT_DIR + '/' + example + '.log'
    runCSlicerTool(cslicer_orig_log, config_file, 'orig')
    # -------------------------------- cslicer end -------------------------------------
    # debug: move repo to somewhere else
    end_time = time.time()
    run_time = end_time - start_time
    putTimeinLog(cslicer_orig_log, run_time)
    countChangedLines(cslicer_orig_log, repo_path, 'cslicer')
    #backupRepoForDebugging(example, repo_path)
def runDefinerStandalone(example):
    """Run the standalone Definer pipeline for *example*: refresh the
    repo from its '-cache' copy, run tests at the end commit, run
    Definer, then append run time and changed-line stats to the log."""
    print ('Starting Example :' + example)
    start_time = time.time()
    # extract info from config file
    start, end, repo_name, build_script_path, test_suite, repo_path, lines, config_file = \
            extractInfoFromDefinerConfigs(example)
    if os.path.isdir(repo_path):
        print ('remove old repo')
        shutil.rmtree(repo_path)
    # fresh working copy from the pristine '-cache' clone
    shutil.copytree(repo_path + '-cache', repo_path)
    # checkout to end commit and run the tests
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    # run definer, save temp logs
    definer_log = DEFINER_STANDALONE_OUTPUT_DIR + '/' + example + '.log'
    runDefinerTool(definer_log, config_file, 'definerorig')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # -------------------------------- definer end -------------------------------------
    # debug: move repo to somewhere else
    end_time = time.time()
    run_time = end_time - start_time
    putTimeinLog(definer_log, run_time)
    countChangedLines(definer_log, repo_path, 'definer')
    #backupRepoForDebugging(example, repo_path)
@with_goto
def runSplitCSlicer(example, share_prefix, share_suffix, \
                    orig_history_dir=ORIG_HISTORY_DIR, \
                    cached_repos_dir=CACHED_REPOS_DIR, \
                    output_dir=SPLIT_CSLICER_OUTPUT_DIR, \
                    configs_dir=SPLIT_CSLICER_CONFIGS_DIR):
    """Run the split-then-CSlicer pipeline for *example*.

    Uses `goto`/`label` from the with_goto decorator to jump between the
    split and cslicer phases.  NOTE(review): share_prefix/share_suffix
    are accepted but not consulted in this variant — confirm whether
    caching was intentionally disabled here.
    """
    print ('Starting Example :' + example)
    # start counting the exec time
    start_time = time.time()
    start, end, repo_name, test_suite, repo_path, lines, config_file = \
            extractInfoFromCSlicerConfigs(example)
    # remove the old repo in _downloads dir
    if os.path.isdir(repo_path):
        print ('remove old repo')
        time.sleep(30)
        shutil.rmtree(repo_path, ignore_errors=True)
    # check if cache is disabled
    is_run_from_cache = False
    shutil.copytree(repo_path + '-cache', repo_path)
    # suffix cache: cache initial state, using full suffix
    orig_history = getOriginalHistory(start, end, repo_path)
    orig_history_file = orig_history_dir + '/' + example + '.hist'
    fw = open(orig_history_file, 'w')
    fw.write('\n'.join(orig_history))
    fw.close()
    label .split
    # -------------------------------- split start -------------------------------------
    # split commits by file
    splitCommitsByFile(example, repo_path, start, end, 'after-split')
    # generate split log file
    split_log = genSplitLogFile(example, config='split-cslicer', start=start, \
                                repo_path=repo_path, branch='after-split')
    split_end_time = time.time()
    split_exec_time = split_end_time - start_time
    countChangedLines(split_log, repo_path, 'split')
    # -------------------------------- split end -------------------------------------
    label .cslicer
    # -------------------------------- cslicer start -------------------------------------
    # generate new config files for splitted history
    split_config_file = genSplittedConfigFile(example, repo_path, lines, configs_dir, \
                                              'after-split')
    # run tests at end commit, generate jacoco files
    runTestsGenJacoco(example, end, repo_path, test_suite)
    # stash changes on pom
    sub.run('git stash', shell=True)
    # run cslicer on splitted history, save logs
    cslicer_split_log = output_dir + '/' + example + '.log'
    runCSlicerTool(cslicer_split_log, split_config_file, 'after-split')
    # cherry-pick history slice to a new branch, reset start and end
    cslicer_history_slice, commit_msg_list = \
            extractHistorySliceFromCSlicerLog(cslicer_split_log)
    # for NET-525, NET-527 (how to do in split level?)
    # if example == 'NET-525' or example == 'NET-527':
    #     cslicer_history_slice.append('4379a681')
    #     commit_msg_list.append('Cut-n-paste bug')
    end = applyHistorySlice(repo_path, start, cslicer_history_slice, commit_msg_list, \
                            'after-split-cslicer')
    cachePrefixRepoIfNotAlreadyCached(example, 'split-cslicer', repo_path)
    cslicer_end_time = time.time()
    cslicer_exec_time = cslicer_end_time - split_end_time
    countChangedLines(cslicer_split_log, repo_path, 'cslicer')
    # -------------------------------- cslicer end -------------------------------------
    final_log = cslicer_split_log
    label .timeout
    label .skip_suffix
    # debug: move repo to somewhere else
    end_time = time.time()
    run_time = end_time - start_time
    # record per-phase and total wall-clock times in the final log
    time_dict = collections.OrderedDict({})
    time_dict['[Split Exec Time]'] = split_exec_time
    time_dict['[CSlicer Exec Time]'] = cslicer_exec_time
    time_dict['[Total Exec Time]'] = run_time
    insertTimeDictinLog(final_log, time_dict)
    countChangedLines(final_log, repo_path, 'cslicer')
    #backupRepoForDebugging(example, repo_path)
    cleanTempLogs()
@with_goto
def runSplitDefiner(example, share_prefix, share_suffix, orig_history_dir=ORIG_HISTORY_DIR, \
                    cached_repos_dir=CACHED_REPOS_DIR, output_dir=SPLIT_DEFINER_OUTPUT_DIR, \
                    configs_dir=SPLIT_DEFINER_CONFIGS_DIR):
    """Run the Split -> Definer (SD) pipeline on one example.

    Phase 1 ("split") rewrites the example's history into per-file commits;
    phase 2 ("definer") runs the Definer tool on the split history and
    cherry-picks the resulting slice onto a new branch.

    Caching:
      * prefix cache (share_prefix): reuse a repo cached after an earlier
        phase and jump (goto) directly to the next phase;
      * suffix cache (share_suffix): if another configuration already
        computed a slice from a matching intermediate state, copy its final
        slice/log and skip the remaining phases.

    Control flow uses the goto/label statements enabled by @with_goto.
    Phase timings are either wall-clock floats or the strings
    'NOT RUN' / 'TIME OUT', all written into the final log.

    :param example: example identifier (e.g. 'NET-525'); selects configs/logs
    :param share_prefix: enable reuse of cached intermediate repos
    :param share_suffix: enable reuse of cached final slices
    :param orig_history_dir: directory for saved original-history files
    :param cached_repos_dir: root directory of cached intermediate repos
    :param output_dir: directory receiving the final '<example>.log'
    :param configs_dir: directory receiving generated split config files
    """
    print ('Starting Example :' + example)
    # start counting the exec time
    start_time = time.time()
    start, end, repo_name, build_script_path, test_suite, repo_path, lines, config_file = \
            extractInfoFromDefinerConfigs(example)
    # remove the old repo in _downloads dir
    if os.path.isdir(repo_path):
        print ('remove old repo')
        # NOTE(review): presumably waits for stray handles on the repo to be
        # released before deletion -- confirm whether 30s is actually needed
        time.sleep(30)
        shutil.rmtree(repo_path, ignore_errors=True)
    # check if cache is disabled
    if not share_prefix:
        is_run_from_cache = False
        shutil.copytree(repo_path + '-cache', repo_path)
        goto .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # copy the cached repo if exist
    if isPrefixRepoCached(example, 'split'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'split', repo_path)
    else:
        # no cache, copy a new repo
        shutil.copytree(repo_path + '-cache', repo_path)
    if isPrefixRepoCached(example, 'split'):
        is_run_from_cache =True
        # split phase is skipped entirely, so it contributes zero elapsed time
        split_end_time = start_time
        split_exec_time = 'NOT RUN'
        goto .definer
    else: # cache not exist
        is_run_from_cache = False
        goto .split
    label .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('split-definer', example):
            is_match, matched_config = isStateMatch(None, 'split-definer', \
                                                    example, start=start, end=end, \
                                                    repo_path=repo_path)
            if is_match:
                is_suffix_skipped = True
                split_exec_time = 'NOT RUN'
                definer_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
    # suffix cache: cache initial state, using full suffix
    orig_history = getOriginalHistory(start, end, repo_path)
    orig_history_file = orig_history_dir + '/' + example + '.hist'
    fw = open(orig_history_file, 'w')
    fw.write('\n'.join(orig_history))
    fw.close()
    cacheSuffixIfNotAlreadyCached(example, config='split-definer', suffix='split-definer', \
                                  log_file=orig_history_file)
    label .split
    # -------------------------------- split start -------------------------------------
    # split commits by file
    splitCommitsByFile(example, repo_path, start, end, 'after-split')
    # cache intermediate repo after split (S)
    cachePrefixRepoIfNotAlreadyCached(example, 'split', repo_path)
    # generate split log file
    split_log = genSplitLogFile(example, config='split-definer', start=start, \
                                repo_path=repo_path, branch='after-split')
    split_end_time = time.time()
    split_exec_time = split_end_time - start_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('definer', example):
            is_match, matched_config = isStateMatch(split_log, 'definer', example)
            if is_match:
                is_suffix_skipped = True
                definer_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
    # cache suffix:
    cacheSuffixIfNotAlreadyCached(example, config='split-definer', suffix='definer', \
                                  log_file=split_log)
    countChangedLines(split_log, repo_path, 'split')
    # -------------------------------- split end -------------------------------------
    label .definer
    # -------------------------------- definer start -------------------------------------
    # generate new config files for splitted history
    # re-read config here because the cached-prefix path jumps straight to
    # this label without executing the extraction at the top of the function
    _, end, _, _, test_suite, _, lines, _ = extractInfoFromDefinerConfigs(example)
    split_config_file = genSplittedConfigFile(example, repo_path, lines, configs_dir, \
                                              'after-split')
    # checkout to end commit and run the tests
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    # run definer on splitted history, save logs
    definer_log = output_dir + '/' + example + '.log'
    runDefinerTool(definer_log, split_config_file, 'after-split')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # cherry-pick history slice to a new branch
    history_slice, commit_msg_list = extractHistorySliceFromDefinerLog(definer_log)
    # an empty slice together with no commit messages signals a Definer timeout
    if len(history_slice) == 0 and len(commit_msg_list) == 0:
        print ('Definer times out!')
        definer_exec_time = 'TIME OUT'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, history_slice, commit_msg_list, \
                            'after-split-definer')
    # cache intermediate repo after split-definer (SD)
    cachePrefixRepoIfNotAlreadyCached(example, 'split-definer', repo_path)
    definer_end_time = time.time()
    definer_exec_time = definer_end_time - split_end_time
    final_log = definer_log
    # -------------------------------- definer end -------------------------------------
    label .timeout
    label .skip_suffix
    # debug: move repo to somewhere else
    end_time = time.time()
    run_time = end_time - start_time
    # record per-phase and total wall-clock times in the final log
    time_dict = collections.OrderedDict({})
    time_dict['[Split Exec Time]'] = split_exec_time
    time_dict['[Definer Exec Time]'] = definer_exec_time
    time_dict['[Total Exec Time]'] = run_time
    insertTimeDictinLog(final_log, time_dict)
    if not is_suffix_skipped:
        countChangedLines(final_log, repo_path, 'definer')
    #backupRepoForDebugging(example, repo_path)
def runCSlicerSplitCSlicer(example, regenerate=False):
    """Run the two-phase CSlicer -> Split -> CSlicer pipeline on one example.

    Phase 1 runs CSlicer over the original history and cherry-picks the
    resulting slice onto a fresh branch; the sliced history is then split
    into per-file commits and CSlicer runs a second time at file level.

    :param example: example identifier used to locate configs and logs
    :param regenerate: when True, re-run the tests at the new end commit of
        phase 1 to regenerate jacoco coverage before splitting
    """
    print (f'Starting Example :{example}')
    pipeline_started = time.time()
    # pull start/end SHAs, repo location and test info from the CSlicer config
    (start_sha, end_sha, repo_name, test_suite,
     repo_path, touched_lines, config_file) = extractInfoFromCSlicerConfigs(example)
    # always start from a fresh copy of the cached repo
    if os.path.isdir(repo_path):
        print ('remove old repo')
        shutil.rmtree(repo_path)
    shutil.copytree(f'{repo_path}-cache', repo_path)
    # phase 1: jacoco at the end commit, then CSlicer over the original history
    runTestsGenJacoco(example, end_sha, repo_path, test_suite)
    sub.run('git stash', shell=True)  # discard pom edits made for the test run
    phase1_log = f'{CSLICER_SPLIT_CSLICER_OUTPUT_DIR}/{example}.log.phase1'
    runCSlicerTool(phase1_log, config_file, 'orig')
    # leave the slicing branch (trunk or master, whichever exists) and drop it
    for git_cmd in ('git checkout trunk', 'git checkout master', 'git branch -D orig'):
        sub.run(git_cmd, shell=True)
    # cherry-pick the phase-1 slice onto a new branch; the end SHA moves with it
    slice_shas, slice_msgs = extractHistorySliceFromCSlicerLog(phase1_log)
    if example in ('NET-525', 'NET-527'):
        # these two examples need one extra commit appended by hand
        slice_shas.append('4379a681')
        slice_msgs.append('Cut-n-paste bug')
    end_sha = applyHistorySlice(repo_path, start_sha, slice_shas, slice_msgs,
                                'aftercslicer')
    if regenerate:
        # refresh jacoco data at the new end commit before phase 2
        runTestsGenJacoco(example, end_sha, repo_path, test_suite,
                          poms_dir=CSLICER_SPLIT_CSLICER_SECOND_PHASE_POM_DIR)
    # phase 2: split the sliced history per file and re-run CSlicer on it
    splitCommitsByFile(example, repo_path, start_sha, end_sha)
    file_level_config = genSplittedConfigFile(example, repo_path, touched_lines,
                                              CSLICER_SPLIT_CSLICER_CONFIGS_DIR)
    final_log = f'{CSLICER_SPLIT_CSLICER_OUTPUT_DIR}/{example}.log'
    runCSlicerTool(final_log, file_level_config, 'filelevel')
    # record total wall-clock time and the changed-line count, then clean up
    putTimeinLog(final_log, time.time() - pipeline_started)
    countChangedLines(final_log, repo_path, 'cslicer')
    cleanTempLogs()
@with_goto
def runCSlicerSplitDefiner(example, share_prefix, share_suffix, \
                           orig_history_dir=ORIG_HISTORY_DIR, \
                           cached_repos_dir=CACHED_REPOS_DIR, \
                           output_dir=CSLICER_SPLIT_DEFINER_OUTPUT_DIR, \
                           configs_dir=CSLICER_SPLIT_DEFINER_CONFIGS_DIR):
    """Run the CSlicer -> Split -> Definer (CSD) pipeline on one example.

    Phases: (1) CSlicer over the original history, slice cherry-picked onto
    'after-cslicer'; (2) split the sliced history into per-file commits;
    (3) Definer over the split history, final slice cherry-picked onto
    'after-cslicer-split-definer'.

    Prefix caching (share_prefix) reuses repos cached after earlier phases
    and jumps (goto) to the first phase still missing; suffix caching
    (share_suffix) copies the final slice from another configuration whose
    intermediate state matches and skips the remaining phases.

    :param example: example identifier; selects configs and log names
    :param share_prefix: enable reuse of cached intermediate repos
    :param share_suffix: enable reuse of cached final slices
    :param orig_history_dir: directory for saved original-history files
    :param cached_repos_dir: root directory of cached intermediate repos
    :param output_dir: directory for phase logs and the final log
    :param configs_dir: directory receiving generated split config files
    """
    print ('Starting Example :' + example)
    # start counting the exec time
    start_time = time.time()
    start, end, repo_name, test_suite, repo_path, lines, config_file = \
            extractInfoFromCSlicerConfigs(example)
    # remove the old repo in _downloads dir
    if os.path.isdir(repo_path):
        print ('remove old repo')
        # NOTE(review): presumably waits for handles on the repo to be
        # released before deletion -- confirm the 30s is required
        time.sleep(30)
        shutil.rmtree(repo_path, ignore_errors=True)
    # check if cache is disabled
    if not share_prefix:
        is_run_from_cache = False
        shutil.copytree(repo_path + '-cache', repo_path)
        goto .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # copy the cached repo if exist
    # prefer the deepest cached prefix: cslicer-split (CS) over cslicer (C)
    if isPrefixRepoCached(example, 'cslicer-split'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'cslicer-split', repo_path)
    elif isPrefixRepoCached(example, 'cslicer'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'cslicer', repo_path)
    else:
        # no cache, copy a new repo
        shutil.copytree(repo_path + '-cache', repo_path)
    if isPrefixRepoCached(example, 'cslicer-split'):
        is_run_from_cache =True
        # skipped phases contribute zero elapsed time
        split_end_time = start_time
        cslicer_exec_time = 'NOT RUN'
        split_exec_time = 'NOT RUN'
        goto .definer
    elif isPrefixRepoCached(example, 'cslicer'):
        is_run_from_cache =True
        cslicer_end_time = start_time
        cslicer_exec_time = 'NOT RUN'
        goto .split
    else: # cache not exist
        is_run_from_cache = False
        goto .cslicer
    label .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('cslicer-split-definer', example):
            is_match, matched_config = isStateMatch(None, 'cslicer-split-definer', \
                                                    example, start=start, end=end, \
                                                    repo_path=repo_path)
            if is_match:
                is_suffix_skipped = True
                cslicer_exec_time = 'NOT RUN'
                split_exec_time = 'NOT RUN'
                definer_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
    # suffix cache: cache initial state, using full suffix
    orig_history = getOriginalHistory(start, end, repo_path)
    orig_history_file = orig_history_dir + '/' + example + '.hist'
    fw = open(orig_history_file, 'w')
    fw.write('\n'.join(orig_history))
    fw.close()
    cacheSuffixIfNotAlreadyCached(example, config='cslicer-split-definer', \
                                  suffix='cslicer-split-definer', log_file=orig_history_file)
    label .cslicer
    # -------------------------------- cslicer start -------------------------------------
    # run tests at end commit, generate jacoco files
    runTestsGenJacoco(example, end, repo_path, test_suite)
    # stash changes on pom
    sub.run('git stash', shell=True)
    # run cslicer on original history, save temp logs
    cslicer_temp_log = output_dir + '/' + example + '.log.phase1'
    runCSlicerTool(cslicer_temp_log, config_file, 'orig')
    # cherry-pick history slice to a new branch, reset start and end
    cslicer_history_slice, commit_msg_list = \
            extractHistorySliceFromCSlicerLog(cslicer_temp_log)
    # for NET-525, NET-527
    # these two examples need one extra commit appended by hand
    if example == 'NET-525' or example == 'NET-527':
        cslicer_history_slice.append('4379a681')
        commit_msg_list.append('Cut-n-paste bug')
    end = applyHistorySlice(repo_path, start, cslicer_history_slice, commit_msg_list, \
                            'after-cslicer')
    # cache intermediate repo after cslicer (C)
    cachePrefixRepoIfNotAlreadyCached(example, 'cslicer', repo_path)
    cslicer_end_time = time.time()
    cslicer_exec_time = cslicer_end_time - start_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('split-definer', example):
            is_match, matched_config = isStateMatch(cslicer_temp_log, 'split-definer', example)
            if is_match:
                is_suffix_skipped = True
                split_exec_time = 'NOT RUN'
                definer_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
    # cache suffix:
    cacheSuffixIfNotAlreadyCached(example, config='cslicer-split-definer', \
                                  suffix='split-definer', log_file=cslicer_temp_log)
    countChangedLines(cslicer_temp_log, repo_path, 'cslicer')
    # -------------------------------- cslicer end -------------------------------------
    label .split
    # -------------------------------- split start -------------------------------------
    if is_run_from_cache:
        # the cached repo already holds the 'after-cslicer' branch; recover
        # its end SHA since the cslicer phase above did not run
        end = getEndSHAFrombranch(example, config='cslicer', branch='after-cslicer')
    # split commits by file
    splitCommitsByFile(example, repo_path, start, end, 'after-cslicer-split')
    # cache intermediate repo after cslicer-split (CS)
    cachePrefixRepoIfNotAlreadyCached(example, 'cslicer-split', repo_path)
    # generate split log file
    split_log = genSplitLogFile(example, config='cslicer-split-definer', start=start, \
                                repo_path=repo_path, branch='after-cslicer-split')
    split_end_time = time.time()
    split_exec_time = split_end_time - cslicer_end_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('definer', example):
            is_match, matched_config = isStateMatch(split_log, 'definer', example)
            if is_match:
                is_suffix_skipped = True
                definer_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
    # cache suffix:
    cacheSuffixIfNotAlreadyCached(example, config='cslicer-split-definer', suffix='definer', \
                                  log_file=split_log)
    countChangedLines(split_log, repo_path, 'split')
    # -------------------------------- split end -------------------------------------
    label .definer
    # -------------------------------- definer start -------------------------------------
    # generate new config files for splitted history
    # re-read config because cached-prefix paths jump here directly
    _, end, _, _, test_suite, _, lines, _ = extractInfoFromDefinerConfigs(example)
    split_config_file = genSplittedConfigFile(example, repo_path, lines, configs_dir, \
                                              'after-cslicer-split')
    # run definer on splitted history, save logs
    definer_log = output_dir + '/' + example + '.log'
    # checkout to end commit and run the tests
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    runDefinerTool(definer_log, split_config_file, 'after-cslicer-split')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # cherry-pick history slice to a new branch
    definer_history_slice, commit_msg_list = extractHistorySliceFromDefinerLog(definer_log)
    # an empty slice together with no commit messages signals a Definer timeout
    if len(definer_history_slice) == 0 and len(commit_msg_list) == 0:
        print ('Definer times out!')
        definer_exec_time = 'TIME OUT'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, definer_history_slice, commit_msg_list, \
                            'after-cslicer-split-definer')
    # cache intermediate repo after cslicer-definer-split-definer (CSD)
    cachePrefixRepoIfNotAlreadyCached(example, 'cslicer-split-definer', repo_path)
    definer_end_time = time.time()
    definer_exec_time = definer_end_time - split_end_time
    final_log = definer_log
    # -------------------------------- definer end -------------------------------------
    label .timeout
    label .skip_suffix
    # debug: move repo to somewhere else
    end_time = time.time()
    run_time = end_time - start_time
    # record per-phase and total wall-clock times in the final log
    time_dict = collections.OrderedDict({})
    time_dict['[CSlicer Exec Time]'] = cslicer_exec_time
    time_dict['[Split Exec Time]'] = split_exec_time
    time_dict['[Definer Exec Time]'] = definer_exec_time
    time_dict['[Total Exec Time]'] = run_time
    insertTimeDictinLog(final_log, time_dict)
    if not is_suffix_skipped:
        countChangedLines(final_log, repo_path, 'definer')
    #backupRepoForDebugging(example, repo_path)
    # clean temp logs
    cleanTempLogs()
@with_goto
def runDefinerSplitCSlicer(example, share_prefix, share_suffix, \
                           orig_history_dir=ORIG_HISTORY_DIR, \
                           cached_repos_dir=CACHED_REPOS_DIR, \
                           output_dir=DEFINER_SPLIT_CSLICER_OUTPUT_DIR, \
                           configs_dir=DEFINER_SPLIT_CSLICER_CONFIGS_DIR):
    """Run the Definer -> Split -> CSlicer (DSC) pipeline on one example.

    Phases: (1) Definer over the original history, slice cherry-picked onto
    'after-definer'; (2) split the sliced history into per-file commits;
    (3) CSlicer over the split history, final slice cherry-picked onto
    'after-definer-split-cslicer'.

    Unlike the other pipelines in this file, no prefix repos or suffix
    slices are cached or reused here: the labels exist only as goto
    targets for the timeout path.

    :param example: example identifier; selects configs and log names
    :param share_prefix: when False, a fresh repo copy is made (see NOTE below)
    :param share_suffix: accepted for signature parity; not consulted here
    :param orig_history_dir: directory for saved original-history files
    :param cached_repos_dir: accepted for signature parity; not consulted here
    :param output_dir: directory for phase logs and the final log
    :param configs_dir: directory receiving generated split config files
    """
    print ('Starting Example :' + example)
    # start counting the exec time
    start_time = time.time()
    # extract info from config file
    start, end, repo_name, build_script_path, test_suite, repo_path, lines, config_file = \
            extractInfoFromDefinerConfigs(example)
    # remove the old repo in _downloads dir
    if os.path.isdir(repo_path):
        print ('remove old repo')
        time.sleep(30)
        shutil.rmtree(repo_path, ignore_errors=True)
    # check if cache is disabled
    if not share_prefix:
        is_run_from_cache = False
        shutil.copytree(repo_path + '-cache', repo_path)
    # NOTE(review): when share_prefix is True the repo is deleted above but
    # never re-copied here -- confirm callers always pass share_prefix=False
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # suffix cache: cache initial state, using full suffix
    orig_history = getOriginalHistory(start, end, repo_path)
    orig_history_file = orig_history_dir + '/' + example + '.hist'
    fw = open(orig_history_file, 'w')
    fw.write('\n'.join(orig_history))
    fw.close()
    label .definer
    # -------------------------------- definer start -------------------------------------
    # checkout to end commit and run the tests
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    # run definer, save temp logs
    definer_log = output_dir + '/' + example + '.log.phase1'
    runDefinerTool(definer_log, config_file, 'definerorig')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # extract history slice from definer log
    history_slice, commit_msg_list = extractHistorySliceFromDefinerLog(definer_log)
    # an empty slice together with no commit messages signals a Definer timeout
    if len(history_slice) == 0 and len(commit_msg_list) == 0:
        print ('Definer times out!')
        definer_exec_time = 'TIME OUT'
        split_exec_time = 'NOT RUN'
        cslicer_exec_time = 'NOT RUN'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, history_slice, commit_msg_list, 'after-definer')
    definer_end_time = time.time()
    definer_exec_time = definer_end_time - start_time
    countChangedLines(definer_log, repo_path, 'definer')
    # -------------------------------- definer end -------------------------------------
    label .split
    # -------------------------------- split start -------------------------------------
    # split commits by file
    splitCommitsByFile(example, repo_path, start, end, 'after-definer-split')
    # generate split log file
    split_log = genSplitLogFile(example, config='definer-split-cslicer', start=start, \
                                repo_path=repo_path, branch='after-definer-split')
    split_end_time = time.time()
    split_exec_time = split_end_time - definer_end_time
    countChangedLines(split_log, repo_path, 'split')
    # -------------------------------- split end -------------------------------------
    label .cslicer
    # -------------------------------- cslicer start -------------------------------------
    # generate new config files for splitted history
    # switch to the CSlicer config for the final phase
    _, end, _, test_suite, _, lines, _ = extractInfoFromCSlicerConfigs(example)
    split_config_file = \
            genSplittedConfigFile(example, repo_path, lines, configs_dir, 'after-definer-split')
    # run tests at end commit, generate jacoco files
    runTestsGenJacoco(example, end, repo_path, test_suite)
    # stash changes on pom
    sub.run('git stash', shell=True)
    # run cslicer on split history, save logs
    cslicer_log = output_dir + '/' + example + '.log'
    runCSlicerTool(cslicer_log, split_config_file, 'after-definer-split')
    cslicer_history_slice, commit_msg_list = extractHistorySliceFromCSlicerLog(cslicer_log)
    end = applyHistorySlice(repo_path, start, cslicer_history_slice, commit_msg_list, \
                            'after-definer-split-cslicer')
    cslicer_end_time = time.time()
    cslicer_exec_time = cslicer_end_time - split_end_time
    countChangedLines(cslicer_log, repo_path, 'cslicer')
    final_log = cslicer_log
    # -------------------------------- cslicer end -------------------------------------
    label .timeout
    label .skip_suffix
    # debug: move repo to somewhere else
    end_time = time.time()
    run_time = end_time - start_time
    # record per-phase and total wall-clock times in the final log
    time_dict = collections.OrderedDict({})
    time_dict['[Definer Exec Time]'] = definer_exec_time
    time_dict['[Split Exec Time]'] = split_exec_time
    time_dict['[CSlicer Exec Time]'] = cslicer_exec_time
    time_dict['[Total Exec Time]'] = run_time
    insertTimeDictinLog(final_log, time_dict)
    countChangedLines(final_log, repo_path, 'cslicer')
    #backupRepoForDebugging(example, repo_path)
    # clean temp logs
    cleanTempLogs()
@with_goto
def runDefinerSplitDefiner(example, share_prefix, share_suffix, \
                           cached_repos_dir=CACHED_REPOS_DIR, \
                           orig_history_dir=ORIG_HISTORY_DIR, \
                           output_dir=DEFINER_SPLIT_DEFINER_OUTPUT_DIR, \
                           configs_dir=DEFINER_SPLIT_DEFINER_CONFIGS_DIR):
    """Run the Definer -> Split -> Definer (DSD) pipeline on one example.

    Phases: (1) Definer over the original history, slice cherry-picked onto
    'after-definer'; (2) split the sliced history into per-file commits;
    (3) a second Definer run ("definer2") over the split history, final
    slice cherry-picked onto 'after-definer-split-definer'.

    Prefix caching (share_prefix) reuses repos cached after earlier phases
    and jumps (goto) to the first phase still missing; suffix caching
    (share_suffix) copies the final slice from another configuration whose
    intermediate state matches and skips the remaining phases.

    :param example: example identifier; selects configs and log names
    :param share_prefix: enable reuse of cached intermediate repos
    :param share_suffix: enable reuse of cached final slices
    :param cached_repos_dir: root directory of cached intermediate repos
    :param orig_history_dir: directory for saved original-history files
    :param output_dir: directory for phase logs and the final log
    :param configs_dir: directory receiving generated split config files
    """
    print ('Starting Example :' + example)
    # start counting the exec time
    start_time = time.time()
    # extract info from config file
    start, end, repo_name, build_script_path, test_suite, repo_path, lines, config_file = \
            extractInfoFromDefinerConfigs(example)
    # remove the old repo in _downloads dir
    if os.path.isdir(repo_path):
        print ('remove old repo')
        # NOTE(review): presumably waits for handles on the repo to be
        # released before deletion -- confirm the 30s is required
        time.sleep(30)
        shutil.rmtree(repo_path, ignore_errors=True)
    # check if prefix cache is disabled
    if not share_prefix:
        is_run_from_cache = False
        shutil.copytree(repo_path + '-cache', repo_path)
        goto .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # copy the cached repo if exist
    # prefer the deepest cached prefix: definer-split (DS) over definer (D)
    if isPrefixRepoCached(example, 'definer-split'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'definer-split', repo_path)
    elif isPrefixRepoCached(example, 'definer'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'definer', repo_path)
    else:
        # no cached repo, copy a new repo
        shutil.copytree(repo_path + '-cache', repo_path)
    if isPrefixRepoCached(example, 'definer-split'):
        is_run_from_cache =True
        # skipped phases contribute zero elapsed time
        split_end_time = start_time
        definer_exec_time = 'NOT RUN'
        split_exec_time = 'NOT RUN'
        goto .definer2
    elif isPrefixRepoCached(example, 'definer'):
        is_run_from_cache =True
        definer_end_time = start_time
        definer_exec_time = 'NOT RUN'
        goto .split
    else: # prefix cache not exist
        is_run_from_cache = False
        goto .definer
    label .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('definer-split-definer', example):
            is_match, matched_config = isStateMatch(None, 'definer-split-definer', example, \
                                                    start=start, end=end, repo_path=repo_path)
            if is_match:
                is_suffix_skipped = True
                definer_exec_time = 'NOT RUN'
                split_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
    # suffix cache: cache initial state, using full suffix
    orig_history = getOriginalHistory(start, end, repo_path)
    orig_history_file = orig_history_dir + '/' + example + '.hist'
    fw = open(orig_history_file, 'w')
    fw.write('\n'.join(orig_history))
    fw.close()
    cacheSuffixIfNotAlreadyCached(example, config='definer-split-definer', \
                                  suffix='definer-split-definer', log_file=orig_history_file)
    label .definer
    # -------------------------------- definer start -------------------------------------
    # checkout to end commit and run the tests
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    # run definer, save temp logs
    definer_log = output_dir + '/' + example + '.log.phase1'
    runDefinerTool(definer_log, config_file, 'definerorig')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # extract history slice from definer log
    definer_history_slice, commit_msg_list = extractHistorySliceFromDefinerLog(definer_log)
    # an empty slice together with no commit messages signals a Definer timeout
    if len(definer_history_slice) == 0 and len(commit_msg_list) == 0:
        print ('Definer times out!')
        definer_exec_time = 'TIME OUT'
        split_exec_time = 'NOT RUN'
        definer2_exec_time = 'NOT RUN'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, definer_history_slice, commit_msg_list, \
                            'after-definer')
    # cache prefix: intermediate repo after definer (D)
    cachePrefixRepoIfNotAlreadyCached(example, 'definer', repo_path)
    definer_end_time = time.time()
    definer_exec_time = definer_end_time - start_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('split-definer', example):
            is_match, matched_config = isStateMatch(definer_log, 'split-definer', example)
            if is_match:
                is_suffix_skipped = True
                split_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
    # cache suffix:
    cacheSuffixIfNotAlreadyCached(example, config='definer-split-definer', \
                                  suffix='split-definer', log_file=definer_log)
    countChangedLines(definer_log, repo_path, 'definer')
    # -------------------------------- definer end -------------------------------------
    label .split
    # -------------------------------- split start -------------------------------------
    if is_run_from_cache:
        # the cached repo already holds the 'after-definer' branch; recover
        # its end SHA since the definer phase above did not run
        end = getEndSHAFrombranch(example, config='definer', branch='after-definer')
    # split commits by file
    splitCommitsByFile(example, repo_path, start, end, 'after-definer-split')
    # cache intermediate repo after definer-split (DS)
    cachePrefixRepoIfNotAlreadyCached(example, 'definer-split', repo_path)
    # generate split log file
    split_log = genSplitLogFile(example, config='definer-split-definer', start=start, \
                                repo_path=repo_path, branch='after-definer-split')
    split_end_time = time.time()
    split_exec_time = split_end_time - definer_end_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('definer', example):
            is_match, matched_config = isStateMatch(split_log, 'definer', example)
            if is_match:
                is_suffix_skipped = True
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
    # cache suffix
    cacheSuffixIfNotAlreadyCached(example, config='definer-split-definer', suffix='definer', \
                                  log_file=split_log)
    countChangedLines(split_log, repo_path, 'split')
    # -------------------------------- split end -------------------------------------
    label .definer2
    # -------------------------------- definer2 start -------------------------------------
    # generate new config files for splitted history
    # re-read config because cached-prefix paths jump here directly
    _, end, _, _, test_suite, _, lines, _ = extractInfoFromDefinerConfigs(example)
    split_config_file = genSplittedConfigFile(example, repo_path, lines, configs_dir, \
                                              'after-definer-split')
    definer_log = output_dir + '/' + example + '.log'
    # checkout to end commit and run the tests
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    # Run definer on splitted history
    runDefinerTool(definer_log, split_config_file, 'after-definer-split')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # cherry-pick history slice to a new branch, reset start and end
    history_slice, commit_msg_list = extractHistorySliceFromDefinerLog(definer_log)
    # an empty slice together with no commit messages signals a Definer timeout
    if len(history_slice) == 0 and len(commit_msg_list) == 0:
        print ('Definer times out!')
        definer2_exec_time = 'TIME OUT'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, history_slice, commit_msg_list, \
                            'after-definer-split-definer')
    cachePrefixRepoIfNotAlreadyCached(example, 'definer-split-definer', repo_path)
    definer2_end_time = time.time()
    definer2_exec_time = definer2_end_time - split_end_time
    final_log = definer_log
    # -------------------------------- definer2 end -------------------------------------
    label .timeout
    label .skip_suffix
    end_time = time.time()
    run_time = end_time - start_time
    # record per-phase and total wall-clock times in the final log
    time_dict = collections.OrderedDict({})
    time_dict['[Definer Exec Time]'] = definer_exec_time
    time_dict['[Split Exec Time]'] = split_exec_time
    time_dict['[Definer2 Exec Time]'] = definer2_exec_time
    time_dict['[Total Exec Time]'] = run_time
    insertTimeDictinLog(final_log, time_dict)
    if not is_suffix_skipped:
        countChangedLines(final_log, repo_path, 'definer')
    #backupRepoForDebugging(example, repo_path)
    # clean temp logs
    cleanTempLogs()
@with_goto
def runCSlicerDefiner(example, share_prefix, share_suffix, \
                      orig_history_dir=ORIG_HISTORY_DIR, \
                      cached_repos_dir=CACHED_REPOS_DIR, \
                      output_dir=CSLICER_DEFINER_OUTPUT_DIR, \
                      configs_dir=CSLICER_DEFINER_CONFIGS_DIR):
    """Run the CSlicer -> Definer (CD) pipeline on one example.

    Phases: (1) CSlicer over the original history, slice cherry-picked onto
    'after-cslicer'; (2) Definer over the sliced history, final slice
    cherry-picked onto 'after-cslicer-definer'. No prefix repos or suffix
    slices are cached or reused in this configuration; the labels exist
    only as goto targets for the timeout path.

    :param example: example identifier; selects configs and log names
    :param share_prefix: when False, a fresh repo copy is made (see NOTE below)
    :param share_suffix: accepted for signature parity; not consulted here
    :param orig_history_dir: directory for saved original-history files
    :param cached_repos_dir: accepted for signature parity; not consulted here
    :param output_dir: directory for phase logs and the final log
    :param configs_dir: accepted for signature parity; not consulted here
    """
    print ('Starting Example :' + example)
    # start counting the exec time
    start_time = time.time()
    # remove the old repo in _downloads dir
    start, end, repo_name, test_suite, repo_path, lines, config_file = \
            extractInfoFromCSlicerConfigs(example)
    if os.path.isdir(repo_path):
        print ('remove old repo')
        time.sleep(30)
        shutil.rmtree(repo_path, ignore_errors=True)
    # check if cache is disabled
    if not share_prefix:
        is_run_from_cache = False
        shutil.copytree(repo_path + '-cache', repo_path)
    # NOTE(review): when share_prefix is True the repo is deleted above but
    # never re-copied here -- confirm callers always pass share_prefix=False
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # suffix cache: cache initial state, using full suffix
    orig_history = getOriginalHistory(start, end, repo_path)
    orig_history_file = orig_history_dir + '/' + example + '.hist'
    fw = open(orig_history_file, 'w')
    fw.write('\n'.join(orig_history))
    fw.close()
    label .cslicer
    # -------------------------------- cslicer start -------------------------------------
    # run tests at end commit, generate jacoco files
    runTestsGenJacoco(example, end, repo_path, test_suite)
    # stash changes on pom
    sub.run('git stash', shell=True)
    cslicer_temp_log = output_dir + '/' + example + '.log.phase1'
    runCSlicerTool(cslicer_temp_log, config_file, 'orig')
    # delete orig branch
    # leave the slicing branch ('trunk' or 'master', whichever exists)
    sub.run('git checkout trunk', shell=True)
    sub.run('git checkout master', shell=True)
    sub.run('git branch -D orig', shell=True)
    # cherry-pick history slice to a new branch, reset start and end
    cslicer_history_slice, commit_msg_list = \
            extractHistorySliceFromCSlicerLog(cslicer_temp_log)
    # for NET-525, NET-527
    # these two examples need one extra commit appended by hand
    if example == 'NET-525' or example == 'NET-527':
        cslicer_history_slice.append('4379a681')
        commit_msg_list.append('Cut-n-paste bug')
    end = applyHistorySlice(repo_path, start, cslicer_history_slice, commit_msg_list, \
                            'after-cslicer')
    cslicer_end_time = time.time()
    cslicer_exec_time = cslicer_end_time - start_time
    countChangedLines(cslicer_temp_log, repo_path, 'cslicer')
    # -------------------------------- cslicer end -------------------------------------
    label .definer
    # -------------------------------- definer start -------------------------------------
    # temp definer config file
    definer_config_file = updateDefinerConfig(example, end, TEMP_CONFIGS_DIR)
    definer_log = output_dir + '/' + example + '.log'
    # checkout to original end commit and run the tests
    _, end, _, _, test_suite, _, _, _ = extractInfoFromDefinerConfigs(example)
    os.chdir(repo_path)
    # move all untracked test files to temp dir (for running jacoco needed)-----------
    p = sub.Popen('git ls-files --others --exclude-standard', shell=True, \
                  stdout=sub.PIPE, stderr=sub.PIPE)
    p.wait()
    # NOTE: this rebinds 'lines' (previously the config value) to the list of
    # untracked paths reported by git; the config value is not needed below
    lines = p.stdout.readlines()
    for i in range(len(lines)):
        # decode each path and drop the trailing newline
        lines[i] = lines[i].decode("utf-8")[:-1]
        if lines[i].startswith('src/test/'):
            # recreate the file's directory structure under the temp dir,
            # then move the untracked test file there
            dir_structure = '/'.join(lines[i].strip().split('/')[:-1])
            dest_dir = TEMP_FILES_DIR + '/' + dir_structure
            if os.path.isdir(dest_dir):
                shutil.rmtree(dest_dir)
            os.makedirs(dest_dir)
            shutil.move(lines[i].strip(), dest_dir)
            #os.remove(lines[i].strip())
    # -------------------------------------------------------------------------------
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    runDefinerTool(definer_log, definer_config_file, 'after-cslicer')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # extract history slice from definer log
    definer_history_slice, commit_msg_list = \
            extractHistorySliceFromDefinerLog(definer_log)
    # an empty slice together with no commit messages signals a Definer timeout
    if len(definer_history_slice) == 0 and len(commit_msg_list) == 0:
        print ('Definer times out!')
        definer_exec_time = 'TIME OUT'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, definer_history_slice, commit_msg_list, \
                            'after-cslicer-definer')
    # cache intermediate repo after cslicer-definer (CD)
    definer_end_time = time.time()
    definer_exec_time = definer_end_time - cslicer_end_time
    final_log = definer_log
    # -------------------------------- definer end -------------------------------------
    label .timeout
    label .skip_suffix
    # debug: move repo to somewhere else
    end_time = time.time()
    run_time = end_time - start_time
    # record per-phase and total wall-clock times in the final log
    time_dict = collections.OrderedDict({})
    time_dict['[CSlicer Exec Time]'] = cslicer_exec_time
    time_dict['[Definer Exec Time]'] = definer_exec_time
    time_dict['[Total Exec Time]'] = run_time
    insertTimeDictinLog(final_log, time_dict)
    countChangedLines(final_log, repo_path, 'definer')
    #backupRepoForDebugging(example, repo_path)
    cleanTempLogs()
@with_goto
def runSplitCSlicerDefiner(example, share_prefix, share_suffix, \
                           orig_history_dir=ORIG_HISTORY_DIR, \
                           cached_repos_dir=CACHED_REPOS_DIR, \
                           output_dir=SPLIT_CSLICER_DEFINER_OUTPUT_DIR, \
                           configs_dir=SPLIT_CSLICER_DEFINER_CONFIGS_DIR):
    """Run the Split -> CSlicer -> Definer slicing pipeline on one example.

    The three phases are chained with ``goto``/``label`` (enabled by the
    ``@with_goto`` decorator) so that a cached intermediate repo ('split' or
    'split-cslicer') or a previously cached suffix result can jump straight
    to a later phase and skip the already-completed work.

    Args:
        example: example id; keys the config files, log files and caches.
        share_prefix: when falsy, always start from a fresh ``<repo>-cache``
            copy and bypass the prefix-repo cache entirely.
        share_suffix: when truthy, try to reuse a previously cached suffix
            result, and cache this run's phase logs for later reuse.
        orig_history_dir: where the original commit history file is written.
        cached_repos_dir: root directory holding cached intermediate repos.
        output_dir: directory receiving this configuration's log files.
        configs_dir: directory receiving generated (split) config files.

    Side effects: deletes/copies the working repo, chdirs into it, runs git
    and tool subprocesses, writes log/history files, updates caches.
    """
    print ('Starting Example :' + example)
    # start counting the exec time
    start_time = time.time()
    start, end, repo_name, test_suite, repo_path, lines, config_file = \
        extractInfoFromCSlicerConfigs(example)
    # remove the old repo in _downloads dir
    if os.path.isdir(repo_path):
        print ('remove old repo')
        # NOTE(review): presumably waits for other processes / the OS to
        # release handles on the old repo before deleting - confirm.
        time.sleep(30)
        shutil.rmtree(repo_path, ignore_errors=True)
    # check if cache is disabled
    if not share_prefix:
        is_run_from_cache = False
        shutil.copytree(repo_path + '-cache', repo_path)
        goto .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # copy the cached repo if exist
    if isPrefixRepoCached(example, 'split-cslicer'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'split-cslicer', repo_path)
    elif isPrefixRepoCached(example, 'split'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'split', repo_path)
    else:
        # no cache, copy a new repo
        shutil.copytree(repo_path + '-cache', repo_path)
    # jump to the first phase that still needs to run; phases skipped via a
    # cache hit get exec time 'NOT RUN' and their end-time pinned to start_time
    # so the next phase's elapsed-time subtraction still works.
    if isPrefixRepoCached(example, 'split-cslicer'):
        is_run_from_cache =True
        cslicer_end_time = start_time
        split_exec_time = 'NOT RUN'
        cslicer_exec_time = 'NOT RUN'
        goto .definer
    elif isPrefixRepoCached(example, 'split'):
        is_run_from_cache =True
        split_end_time = start_time
        split_exec_time = 'NOT RUN'
        goto .cslicer
    else: # cache not exist
        is_run_from_cache = False
        goto .split
    label .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('split-cslicer-definer', example):
            is_match, matched_config = isStateMatch(None, 'split-cslicer-definer', \
                                                    example, start=start, end=end, \
                                                    repo_path=repo_path)
            if is_match:
                is_suffix_skipped = True
                split_exec_time = 'NOT RUN'
                cslicer_exec_time = 'NOT RUN'
                definer_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # suffix cache: cache initial state, using full suffix
        orig_history = getOriginalHistory(start, end, repo_path)
        orig_history_file = orig_history_dir + '/' + example + '.hist'
        fw = open(orig_history_file, 'w')
        fw.write('\n'.join(orig_history))
        fw.close()
        cacheSuffixIfNotAlreadyCached(example, config='split-cslicer-definer', \
                                      suffix='split-cslicer-definer', \
                                      log_file=orig_history_file)
    label .split
    # -------------------------------- split start -------------------------------------
    # split commits by file
    splitCommitsByFile(example, repo_path, start, end, 'after-split')
    # cache intermediate repo after split (S)
    cachePrefixRepoIfNotAlreadyCached(example, 'split', repo_path)
    # generate split log file
    split_log = genSplitLogFile(example, config='split-cslicer-definer', start=start, \
                                repo_path=repo_path, branch='after-split')
    split_end_time = time.time()
    split_exec_time = split_end_time - start_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('cslicer-definer', example):
            is_match, matched_config = isStateMatch(split_log, 'cslicer-definer', example)
            if is_match:
                is_suffix_skipped = True
                cslicer_exec_time = 'NOT RUN'
                definer_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='split-cslicer-definer', \
                                      suffix='cslicer-definer', log_file=split_log)
    countChangedLines(split_log, repo_path, 'split')
    # -------------------------------- split end -------------------------------------
    label .cslicer
    # -------------------------------- cslicer start -------------------------------------
    if is_run_from_cache:
        # the cached repo's branch head, not the original config's end SHA
        end = getEndSHAFrombranch(example, config='split', branch='after-split')
    # generate new config files for splitted history
    split_config_file = genSplittedConfigFile(example, repo_path, lines, configs_dir, \
                                              'after-split')
    # run tests at end commit, generate jacoco files
    runTestsGenJacoco(example, end, repo_path, test_suite)
    # stash changes on pom
    # NOTE(review): runs in the current working directory; presumably the cwd
    # is already the repo (e.g. set inside runTestsGenJacoco) - confirm.
    sub.run('git stash', shell=True)
    # run cslicer on splitted history, save logs
    cslicer_split_log = output_dir + '/' + example + '.log.phase1'
    runCSlicerTool(cslicer_split_log, split_config_file, 'after-split')
    # cherry-pick history slice to a new branch, reset start and end
    cslicer_history_slice, commit_msg_list = \
        extractHistorySliceFromCSlicerLog(cslicer_split_log)
    # for NET-525, NET-527 (how to do in split level?)
    # if example == 'NET-525' or example == 'NET-527':
    #     cslicer_history_slice.append('4379a681')
    #     commit_msg_list.append('Cut-n-paste bug')
    end = applyHistorySlice(repo_path, start, cslicer_history_slice, commit_msg_list, \
                            'after-split-cslicer')
    cachePrefixRepoIfNotAlreadyCached(example, 'split-cslicer', repo_path)
    cslicer_end_time = time.time()
    cslicer_exec_time = cslicer_end_time - split_end_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('definer', example):
            is_match, matched_config = isStateMatch(cslicer_split_log, 'definer', example)
            if is_match:
                is_suffix_skipped = True
                definer_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='split-cslicer-definer', suffix='definer', \
                                      log_file=cslicer_split_log)
    countChangedLines(cslicer_split_log, repo_path, 'cslicer')
    # -------------------------------- cslicer end -------------------------------------
    label .definer
    # -------------------------------- definer start -------------------------------------
    if is_run_from_cache:
        end = getEndSHAFrombranch(example, config='split-cslicer', branch='after-split-cslicer')
    # temp definer config file
    definer_config_file = updateDefinerConfig(example, end, configs_dir)
    definer_log = output_dir + '/' + example + '.log'
    # checkout to original end commit and run the tests
    _, end, _, _, test_suite, _, _, _ = extractInfoFromDefinerConfigs(example)
    os.chdir(repo_path)
    # --------------
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    runDefinerTool(definer_log, definer_config_file, 'after-split-cslicer')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # --------------
    # cherry-pick history slice to a new branch, reset start and end
    history_slice, commit_msg_list = extractHistorySliceFromDefinerLog(definer_log)
    # an empty slice + empty message list is treated as a Definer timeout
    if len(history_slice) == 0 and len(commit_msg_list) == 0:
        print ('Definer times out!')
        definer_exec_time = 'TIME OUT'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, history_slice, commit_msg_list, \
                            'after-split-cslicer-definer')
    cachePrefixRepoIfNotAlreadyCached(example, 'split-cslicer-definer', repo_path)
    definer_end_time = time.time()
    definer_exec_time = definer_end_time - cslicer_end_time
    final_log = definer_log
    # -------------------------------- definer end -------------------------------------
    label .timeout
    label .skip_suffix
    # debug: move repo to somewhere else
    end_time = time.time()
    run_time = end_time - start_time
    time_dict = collections.OrderedDict({})
    time_dict['[Split Exec Time]'] = split_exec_time
    time_dict['[CSlicer Exec Time]'] = cslicer_exec_time
    time_dict['[Definer Exec Time]'] = definer_exec_time
    time_dict['[Total Exec Time]'] = run_time
    insertTimeDictinLog(final_log, time_dict)
    if not is_suffix_skipped:
        countChangedLines(final_log, repo_path, 'definer')
    #backupRepoForDebugging(example, repo_path)
    cleanTempLogs()
@with_goto
def runCSlicerDefinerSplitCSlicer(example, share_prefix, share_suffix, \
                                  orig_history_dir=ORIG_HISTORY_DIR, \
                                  cached_repos_dir=CACHED_REPOS_DIR, \
                                  output_dir=CSLICER_DEFINER_SPLIT_CSLICER_OUTPUT_DIR, \
                                  configs_dir=CSLICER_DEFINER_SPLIT_CSLICER_CONFIGS_DIR):
    """Run the CSlicer -> Definer -> Split -> CSlicer pipeline on one example.

    Phases are chained with ``goto``/``label`` (enabled by ``@with_goto``).
    A cached intermediate repo ('cslicer', 'cslicer-definer', or
    'cslicer-definer-split') lets the run jump straight to the next
    outstanding phase; a matching cached suffix short-circuits the whole
    remainder of the run.

    Args:
        example: example id; keys the config files, log files and caches.
        share_prefix: when falsy, start from a fresh ``<repo>-cache`` copy
            and bypass the prefix-repo cache.
        share_suffix: when truthy, try to reuse a cached suffix result and
            cache this run's phase logs for later reuse.
        orig_history_dir: where the original commit history file is written.
        cached_repos_dir: root directory holding cached intermediate repos.
        output_dir: directory receiving this configuration's log files.
        configs_dir: directory receiving generated (split) config files.

    Side effects: deletes/copies the working repo, chdirs into it, runs git
    and tool subprocesses, moves untracked test files out to TEMP_FILES_DIR
    and back, writes log/history files, updates caches.
    """
    print ('Starting Example :' + example)
    # start counting the exec time
    start_time = time.time()
    # remove the old repo in _downloads dir
    start, end, repo_name, test_suite, repo_path, lines, config_file = \
        extractInfoFromCSlicerConfigs(example)
    if os.path.isdir(repo_path):
        print ('remove old repo')
        time.sleep(30)
        shutil.rmtree(repo_path, ignore_errors=True)
    # check if cache is disabled
    if not share_prefix:
        is_run_from_cache = False
        shutil.copytree(repo_path + '-cache', repo_path)
        goto .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # copy the cached repo if exist (longest cached prefix wins)
    if isPrefixRepoCached(example, 'cslicer-definer-split'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'cslicer-definer-split', \
                        repo_path)
    elif isPrefixRepoCached(example, 'cslicer-definer'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'cslicer-definer', repo_path)
    elif isPrefixRepoCached(example, 'cslicer'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'cslicer', repo_path)
    else:
        # no cache, copy a new repo
        shutil.copytree(repo_path + '-cache', repo_path)
    # jump to the first phase that still needs to run; skipped phases get
    # 'NOT RUN' and their end-time pinned to start_time so the next phase's
    # elapsed-time subtraction still works.
    if isPrefixRepoCached(example, 'cslicer-definer-split'):
        is_run_from_cache =True
        split_end_time = start_time
        cslicer_exec_time = 'NOT RUN'
        definer_exec_time = 'NOT RUN'
        split_exec_time = 'NOT RUN'
        goto .cslicer2
    elif isPrefixRepoCached(example, 'cslicer-definer'):
        is_run_from_cache =True
        definer_end_time = start_time
        cslicer_exec_time = 'NOT RUN'
        definer_exec_time = 'NOT RUN'
        goto .split
    elif isPrefixRepoCached(example, 'cslicer'):
        is_run_from_cache =True
        cslicer_end_time = start_time
        cslicer_exec_time = 'NOT RUN'
        goto .definer
    else: # cache not exist
        is_run_from_cache = False
        goto .cslicer
    label .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('cslicer-definer-split-cslicer', example):
            is_match, matched_config = isStateMatch(None, 'cslicer-definer-split-cslicer', \
                                                    example, start=start, end=end, \
                                                    repo_path=repo_path)
            if is_match:
                is_suffix_skipped = True
                cslicer_exec_time = 'NOT RUN'
                definer_exec_time = 'NOT RUN'
                split_exec_time = 'NOT RUN'
                cslicer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # suffix cache: cache initial state, using full suffix
        orig_history = getOriginalHistory(start, end, repo_path)
        orig_history_file = orig_history_dir + '/' + example + '.hist'
        fw = open(orig_history_file, 'w')
        fw.write('\n'.join(orig_history))
        fw.close()
        cacheSuffixIfNotAlreadyCached(example, config='cslicer-definer-split-cslicer', \
                                      suffix='cslicer-definer-split-cslicer', \
                                      log_file=orig_history_file)
    label .cslicer
    # -------------------------------- cslicer start -------------------------------------
    # run tests at end commit, generate jacoco files
    runTestsGenJacoco(example, end, repo_path, test_suite)
    # stash changes on pom
    sub.run('git stash', shell=True)
    # copy target dir because we will run CSlicer again later
    # cache target dir, otherwise we cannot run CSlicer2 in the middle
    cacheTargetDirForCSlicer2(example, repo_path)
    cslicer_temp_log = output_dir + '/' + example + '.log.phase1'
    runCSlicerTool(cslicer_temp_log, config_file, 'orig')
    # delete orig branch
    # NOTE(review): tries 'trunk' first, then 'master'; presumably one of the
    # two exists per project and the failed checkout is harmless - confirm.
    sub.run('git checkout trunk', shell=True)
    sub.run('git checkout master', shell=True)
    sub.run('git branch -D orig', shell=True)
    # cherry-pick history slice to a new branch, reset start and end
    cslicer_history_slice, commit_msg_list = \
        extractHistorySliceFromCSlicerLog(cslicer_temp_log)
    # for NET-525, NET-527: hard-coded extra commit missed by the slicer
    if example == 'NET-525' or example == 'NET-527':
        cslicer_history_slice.append('4379a681')
        commit_msg_list.append('Cut-n-paste bug')
    end = applyHistorySlice(repo_path, start, cslicer_history_slice, commit_msg_list, \
                            'after-cslicer')
    # cache intermediate repo after cslicer (C)
    cachePrefixRepoIfNotAlreadyCached(example, 'cslicer', repo_path)
    cslicer_end_time = time.time()
    cslicer_exec_time = cslicer_end_time - start_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('definer-split-cslicer', example):
            is_match, matched_config = isStateMatch(cslicer_temp_log, 'definer-split-cslicer', \
                                                    example)
            if is_match:
                is_suffix_skipped = True
                definer_exec_time = 'NOT RUN'
                split_exec_time = 'NOT RUN'
                cslicer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='cslicer-definer-split-cslicer', \
                                      suffix='definer-split-cslicer', log_file=cslicer_temp_log)
    countChangedLines(cslicer_temp_log, repo_path, 'cslicer')
    # -------------------------------- cslicer end -------------------------------------
    label .definer
    # -------------------------------- definer start -------------------------------------
    if is_run_from_cache:
        end = getEndSHAFrombranch(example, config='cslicer', branch='after-cslicer')
    # temp definer config file (CZ: we may change in the future to keep all the temp files)
    definer_config_file = updateDefinerConfig(example, end, TEMP_CONFIGS_DIR)
    definer_log = output_dir + '/' + example + '.log.phase2'
    # checkout to original end commit and run the tests
    _, end, _, _, test_suite, _, _, _ = extractInfoFromDefinerConfigs(example)
    os.chdir(repo_path)
    # move all untracked test files to temp dir (for running jacoco needed)-----------
    p = sub.Popen('git ls-files --others --exclude-standard', shell=True, \
                  stdout=sub.PIPE, stderr=sub.PIPE)
    p.wait()
    lines = p.stdout.readlines()
    for i in range(len(lines)):
        # strip the trailing newline from each path printed by git
        lines[i] = lines[i].decode("utf-8")[:-1]
        if lines[i].startswith('src/test/'):
            # mirror the repo's directory structure under TEMP_FILES_DIR
            dir_structure = '/'.join(lines[i].strip().split('/')[:-1])
            dest_dir = TEMP_FILES_DIR + '/' + dir_structure
            if os.path.isdir(dest_dir):
                shutil.rmtree(dest_dir)
            os.makedirs(dest_dir)
            shutil.move(lines[i].strip(), dest_dir)
            #os.remove(lines[i].strip())
    # -------------------------------------------------------------------------------
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    runDefinerTool(definer_log, definer_config_file, 'after-cslicer')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # extract history slice from definer log
    definer_history_slice, commit_msg_list = extractHistorySliceFromDefinerLog(definer_log)
    # an empty slice + empty message list is treated as a Definer timeout
    if len(definer_history_slice) == 0 and len(commit_msg_list) == 0:
        print ('Definer times out!')
        definer_exec_time = 'TIME OUT'
        split_exec_time = 'NOT RUN'
        cslicer2_exec_time = 'NOT RUN'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, definer_history_slice, commit_msg_list, \
                            'after-cslicer-definer')
    # cache intermediate repo after cslicer-definer (CD)
    cachePrefixRepoIfNotAlreadyCached(example, 'cslicer-definer', repo_path)
    definer_end_time = time.time()
    definer_exec_time = definer_end_time - cslicer_end_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('split-cslicer', example):
            is_match, matched_config = isStateMatch(definer_log, 'split-cslicer', example)
            if is_match:
                is_suffix_skipped = True
                split_exec_time = 'NOT RUN'
                cslicer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='cslicer-definer-split-cslicer', \
                                      suffix='split-cslicer', log_file=definer_log)
    countChangedLines(definer_log, repo_path, 'definer')
    # -------------------------------- definer end -------------------------------------
    label .split
    # -------------------------------- split start -------------------------------------
    if is_run_from_cache:
        end = getEndSHAFrombranch(example, config='cslicer-definer', \
                                  branch='after-cslicer-definer')
    # split commits by file
    splitCommitsByFile(example, repo_path, start, end, 'after-cslicer-definer-split')
    # cache intermediate repo after cslicer-definer-split (CDS)
    cachePrefixRepoIfNotAlreadyCached(example, 'cslicer-definer-split', repo_path)
    # generate split log file
    split_log = genSplitLogFile(example, config='cslicer-definer-split-cslicer', start=start, \
                                repo_path=repo_path, branch='after-cslicer-definer-split')
    split_end_time = time.time()
    split_exec_time = split_end_time - definer_end_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('cslicer', example):
            is_match, matched_config = isStateMatch(split_log, 'cslicer', example)
            if is_match:
                is_suffix_skipped = True
                cslicer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='cslicer-definer-split-cslicer', \
                                      suffix='cslicer', log_file=split_log)
    countChangedLines(split_log, repo_path, 'split')
    # -------------------------------- split end -------------------------------------
    label .cslicer2
    # -------------------------------- cslicer2 start -------------------------------------
    # generate new config files for splitted history
    _, end, _, _, _, lines, _ = extractInfoFromCSlicerConfigs(example)
    split_config_file = genSplittedConfigFile(example, repo_path, lines, configs_dir, \
                                              'after-cslicer-definer-split')
    # move untracked files back (those stashed under TEMP_FILES_DIR above)
    os.chdir(repo_path)
    for dir_path, subpaths, files in os.walk(TEMP_FILES_DIR):
        for f in files:
            if '/src/test' in dir_path:
                shutil.copy(dir_path + '/' + f, \
                            repo_path + dir_path[dir_path.index('/src/test'):])
    # copy target dir back (required by CSlicer)
    copyTargetDirBackForCSlicer2(example, repo_path)
    # run cslicer on split history, save logs
    cslicer_log = output_dir + '/' + example + '.log'
    runCSlicerTool(cslicer_log, split_config_file, 'after-cslicer-definer-split')
    # cherry-pick history slice to a new branch, reset start and end
    cslicer_history_slice, commit_msg_list = \
        extractHistorySliceFromCSlicerLog(cslicer_log)
    end = applyHistorySlice(repo_path, start, cslicer_history_slice, commit_msg_list, \
                            'after-cslicer-definer-split-cslicer')
    # cache intermediate repo after cslicer (CDSC)
    cachePrefixRepoIfNotAlreadyCached(example, 'cslicer-definer-split-cslicer', repo_path)
    cslicer2_end_time = time.time()
    cslicer2_exec_time = cslicer2_end_time - split_end_time
    final_log = cslicer_log
    # -------------------------------- cslicer2 end -------------------------------------
    label .timeout
    label .skip_suffix
    # debug: move repo to somewhere else
    end_time = time.time()
    run_time = end_time - start_time
    time_dict = collections.OrderedDict({})
    time_dict['[CSlicer Exec Time]'] = cslicer_exec_time
    time_dict['[Definer Exec Time]'] = definer_exec_time
    time_dict['[Split Exec Time]'] = split_exec_time
    time_dict['[CSlicer2 Exec Time]'] = cslicer2_exec_time
    time_dict['[Total Exec Time]'] = run_time
    insertTimeDictinLog(final_log, time_dict)
    if not is_suffix_skipped:
        countChangedLines(final_log, repo_path, 'cslicer')
    #backupRepoForDebugging(example, repo_path)
    cleanTempLogs()
@with_goto
def runCSlicerDefinerSplitDefiner(example, share_prefix, share_suffix, \
                                  orig_history_dir=ORIG_HISTORY_DIR, \
                                  cached_repos_dir=CACHED_REPOS_DIR, \
                                  output_dir=CSLICER_DEFINER_SPLIT_DEFINER_OUTPUT_DIR, \
                                  configs_dir=CSLICER_DEFINER_SPLIT_DEFINER_CONFIGS_DIR):
    """Run the CSlicer -> Definer -> Split -> Definer pipeline on one example.

    Phases are chained with ``goto``/``label`` (enabled by ``@with_goto``).
    A cached intermediate repo ('cslicer', 'cslicer-definer', or
    'cslicer-definer-split') lets the run jump straight to the next
    outstanding phase; a matching cached suffix short-circuits the whole
    remainder of the run.

    Fix over previous revision: when the second Definer pass produces an
    empty slice (the timeout signal), record its exec time as 'TIME OUT'
    instead of 'NOT RUN', consistent with the first Definer pass here and
    with the other pipeline variants.

    Args:
        example: example id; keys the config files, log files and caches.
        share_prefix: when falsy, start from a fresh ``<repo>-cache`` copy
            and bypass the prefix-repo cache.
        share_suffix: when truthy, try to reuse a cached suffix result and
            cache this run's phase logs for later reuse.
        orig_history_dir: where the original commit history file is written.
        cached_repos_dir: root directory holding cached intermediate repos.
        output_dir: directory receiving this configuration's log files.
        configs_dir: directory receiving generated (split) config files.

    Side effects: deletes/copies the working repo, chdirs into it, runs git
    and tool subprocesses, moves untracked test files out to TEMP_FILES_DIR,
    writes log/history files, updates caches.
    """
    print ('Starting Example :' + example)
    # start counting the exec time
    start_time = time.time()
    # remove the old repo in _downloads dir
    start, end, repo_name, test_suite, repo_path, lines, config_file = \
        extractInfoFromCSlicerConfigs(example)
    if os.path.isdir(repo_path):
        print ('remove old repo')
        time.sleep(30)
        shutil.rmtree(repo_path, ignore_errors=True)
    # check if cache is disabled
    if not share_prefix:
        is_run_from_cache = False
        shutil.copytree(repo_path + '-cache', repo_path)
        goto .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # copy the cached repo if exist (longest cached prefix wins)
    if isPrefixRepoCached(example, 'cslicer-definer-split'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'cslicer-definer-split', \
                        repo_path)
    elif isPrefixRepoCached(example, 'cslicer-definer'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'cslicer-definer', repo_path)
    elif isPrefixRepoCached(example, 'cslicer'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'cslicer', repo_path)
    else:
        # no cache, copy a new repo
        shutil.copytree(repo_path + '-cache', repo_path)
    # jump to the first phase that still needs to run; skipped phases get
    # 'NOT RUN' and their end-time pinned to start_time so the next phase's
    # elapsed-time subtraction still works.
    if isPrefixRepoCached(example, 'cslicer-definer-split'):
        is_run_from_cache =True
        split_end_time = start_time
        cslicer_exec_time = 'NOT RUN'
        definer_exec_time = 'NOT RUN'
        split_exec_time = 'NOT RUN'
        goto .definer2
    elif isPrefixRepoCached(example, 'cslicer-definer'):
        is_run_from_cache =True
        definer_end_time = start_time
        cslicer_exec_time = 'NOT RUN'
        definer_exec_time = 'NOT RUN'
        goto .split
    elif isPrefixRepoCached(example, 'cslicer'):
        is_run_from_cache =True
        cslicer_end_time = start_time
        cslicer_exec_time = 'NOT RUN'
        goto .definer
    else: # cache not exist
        is_run_from_cache = False
        goto .cslicer
    label .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('cslicer-definer-split-definer', example):
            is_match, matched_config = isStateMatch(None, 'cslicer-definer-split-definer', \
                                                    example, start=start, end=end, \
                                                    repo_path=repo_path)
            if is_match:
                is_suffix_skipped = True
                cslicer_exec_time = 'NOT RUN'
                definer_exec_time = 'NOT RUN'
                split_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # suffix cache: cache initial state, using full suffix
        orig_history = getOriginalHistory(start, end, repo_path)
        orig_history_file = orig_history_dir + '/' + example + '.hist'
        fw = open(orig_history_file, 'w')
        fw.write('\n'.join(orig_history))
        fw.close()
        cacheSuffixIfNotAlreadyCached(example, config='cslicer-definer-split-definer', \
                                      suffix='cslicer-definer-split-definer', \
                                      log_file=orig_history_file)
    label .cslicer
    # -------------------------------- cslicer start -------------------------------------
    # run tests at end commit, generate jacoco files
    runTestsGenJacoco(example, end, repo_path, test_suite)
    # stash changes on pom
    sub.run('git stash', shell=True)
    cslicer_temp_log = output_dir + '/' + example + '.log.phase1'
    runCSlicerTool(cslicer_temp_log, config_file, 'orig')
    # delete orig branch (try 'trunk' first, then 'master')
    sub.run('git checkout trunk', shell=True)
    sub.run('git checkout master', shell=True)
    sub.run('git branch -D orig', shell=True)
    # cherry-pick history slice to a new branch, reset start and end
    cslicer_history_slice, commit_msg_list = \
        extractHistorySliceFromCSlicerLog(cslicer_temp_log)
    # for NET-525, NET-527: hard-coded extra commit missed by the slicer
    if example == 'NET-525' or example == 'NET-527':
        cslicer_history_slice.append('4379a681')
        commit_msg_list.append('Cut-n-paste bug')
    end = applyHistorySlice(repo_path, start, cslicer_history_slice, commit_msg_list, \
                            'after-cslicer')
    # cache intermediate repo after cslicer (C)
    cachePrefixRepoIfNotAlreadyCached(example, 'cslicer', repo_path)
    cslicer_end_time = time.time()
    cslicer_exec_time = cslicer_end_time - start_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('definer-split-definer', example):
            is_match, matched_config = isStateMatch(cslicer_temp_log, 'definer-split-definer', \
                                                    example)
            if is_match:
                is_suffix_skipped = True
                definer_exec_time = 'NOT RUN'
                split_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='cslicer-definer-split-definer', \
                                      suffix='definer-split-definer', log_file=cslicer_temp_log)
    countChangedLines(cslicer_temp_log, repo_path, 'cslicer')
    # -------------------------------- cslicer end -------------------------------------
    label .definer
    # -------------------------------- definer start -------------------------------------
    if is_run_from_cache:
        end = getEndSHAFrombranch(example, config='cslicer', branch='after-cslicer')
    # temp definer config file (CZ: we may change in the future to keep all the temp files)
    definer_config_file = updateDefinerConfig(example, end, TEMP_CONFIGS_DIR)
    definer_log = output_dir + '/' + example + '.log.phase2'
    # checkout to original end commit and run the tests
    _, end, _, _, test_suite, _, _, _ = extractInfoFromDefinerConfigs(example)
    os.chdir(repo_path)
    # move all untracked test files to temp dir (for running jacoco needed)-----------
    p = sub.Popen('git ls-files --others --exclude-standard', shell=True, \
                  stdout=sub.PIPE, stderr=sub.PIPE)
    p.wait()
    lines = p.stdout.readlines()
    for i in range(len(lines)):
        # strip the trailing newline from each path printed by git
        lines[i] = lines[i].decode("utf-8")[:-1]
        if lines[i].startswith('src/test/'):
            # mirror the repo's directory structure under TEMP_FILES_DIR
            dir_structure = '/'.join(lines[i].strip().split('/')[:-1])
            dest_dir = TEMP_FILES_DIR + '/' + dir_structure
            if os.path.isdir(dest_dir):
                shutil.rmtree(dest_dir)
            os.makedirs(dest_dir)
            shutil.move(lines[i].strip(), dest_dir)
            #os.remove(lines[i].strip())
    # -------------------------------------------------------------------------------
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    runDefinerTool(definer_log, definer_config_file, 'after-cslicer')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # extract history slice from definer log
    definer_history_slice, commit_msg_list = \
        extractHistorySliceFromDefinerLog(definer_log)
    # an empty slice + empty message list is treated as a Definer timeout
    if len(definer_history_slice) == 0 and len(commit_msg_list) == 0:
        print ('Definer times out!')
        definer_exec_time = 'TIME OUT'
        split_exec_time = 'NOT RUN'
        definer2_exec_time = 'NOT RUN'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, definer_history_slice, commit_msg_list, \
                            'after-cslicer-definer')
    # cache intermediate repo after cslicer-definer (CD)
    cachePrefixRepoIfNotAlreadyCached(example, 'cslicer-definer', repo_path)
    definer_end_time = time.time()
    definer_exec_time = definer_end_time - cslicer_end_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('split-definer', example):
            is_match, matched_config = isStateMatch(definer_log, 'split-definer', example)
            if is_match:
                is_suffix_skipped = True
                split_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='cslicer-definer-split-definer', \
                                      suffix='split-definer', log_file=definer_log)
    countChangedLines(definer_log, repo_path, 'definer')
    # -------------------------------- definer end -------------------------------------
    label .split
    # -------------------------------- split start -------------------------------------
    if is_run_from_cache:
        end = getEndSHAFrombranch(example, config='cslicer-definer', \
                                  branch='after-cslicer-definer')
    # split commits by file
    splitCommitsByFile(example, repo_path, start, end, 'after-cslicer-definer-split')
    # cache intermediate repo after cslicer-definer-split (CDS)
    cachePrefixRepoIfNotAlreadyCached(example, 'cslicer-definer-split', repo_path)
    # generate split log file
    split_log = genSplitLogFile(example, config='cslicer-definer-split-definer', start=start, \
                                repo_path=repo_path, branch='after-cslicer-definer-split')
    split_end_time = time.time()
    split_exec_time = split_end_time - definer_end_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('definer', example):
            is_match, matched_config = isStateMatch(split_log, 'definer', example)
            if is_match:
                is_suffix_skipped = True
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='cslicer-definer-split-definer', \
                                      suffix='definer', log_file=split_log)
    countChangedLines(split_log, repo_path, 'split')
    # -------------------------------- split end -------------------------------------
    label .definer2
    # -------------------------------- definer2 start -------------------------------------
    # generate new config files for splitted history
    _, end, _, _, test_suite, _, lines, _ = extractInfoFromDefinerConfigs(example)
    split_config_file = genSplittedConfigFile(example, repo_path, lines, configs_dir, \
                                              'after-cslicer-definer-split')
    # run definer on splitted history, save logs
    definer_log = output_dir + '/' + example + '.log'
    # checkout to end commit and run the tests
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    runDefinerTool(definer_log, split_config_file, 'after-cslicer-definer-split')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # cherry-pick history slice to a new branch, reset start and end
    definer_history_slice, commit_msg_list = \
        extractHistorySliceFromDefinerLog(definer_log)
    if len(definer_history_slice) == 0 and len(commit_msg_list) == 0:
        print ('Definer times out!')
        # BUGFIX: this pass did run and timed out, so record 'TIME OUT'
        # (was 'NOT RUN'), matching the first definer phase above and the
        # other pipeline variants.
        definer2_exec_time = 'TIME OUT'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, definer_history_slice, commit_msg_list, \
                            'after-cslicer-definer-split-definer')
    # cache intermediate repo after cslicer-definer-split-definer (CDSD)
    cachePrefixRepoIfNotAlreadyCached(example, 'cslicer-definer-split-definer', repo_path)
    definer2_end_time = time.time()
    definer2_exec_time = definer2_end_time - split_end_time
    final_log = definer_log
    # -------------------------------- definer2 end -------------------------------------
    label .timeout
    label .skip_suffix
    # debug: move repo to somewhere else
    end_time = time.time()
    run_time = end_time - start_time
    time_dict = collections.OrderedDict({})
    time_dict['[CSlicer Exec Time]'] = cslicer_exec_time
    time_dict['[Definer Exec Time]'] = definer_exec_time
    time_dict['[Split Exec Time]'] = split_exec_time
    time_dict['[Definer2 Exec Time]'] = definer2_exec_time
    time_dict['[Total Exec Time]'] = run_time
    insertTimeDictinLog(final_log, time_dict)
    if not is_suffix_skipped:
        countChangedLines(final_log, repo_path, 'definer')
    #backupRepoForDebugging(example, repo_path)
    cleanTempLogs()
@with_goto
def runSplitCSlicerDefinerDefiner(example, share_prefix, share_suffix, \
                                  orig_history_dir=ORIG_HISTORY_DIR, \
                                  cached_repos_dir=CACHED_REPOS_DIR, \
                                  output_dir=SPLIT_CSLICER_DEFINER_DEFINER_OUTPUT_DIR, \
                                  configs_dir=SPLIT_CSLICER_DEFINER_DEFINER_CONFIGS_DIR):
    """Run the split -> cslicer -> definer -> definer pipeline on one example.

    The phases are chained with goto/label (python-goto).  With share_prefix
    the run resumes from the deepest cached prefix repo ('split',
    'split-cslicer', or 'split-cslicer-definer'); with share_suffix the run
    stops early whenever a previously cached suffix result matches the
    current state and copies that result into the final log instead.

    Args:
        example: example id used to locate configs, repos, and log files.
        share_prefix: when True, reuse cached intermediate repos.
        share_suffix: when True, reuse cached slicing results and skip the
            remaining phases on a state match.
        orig_history_dir: directory holding original-history .hist files.
        cached_repos_dir: root directory of the cached intermediate repos.
        output_dir: directory for the final and per-phase logs.
        configs_dir: directory where generated config files are written.

    Side effects: deletes and recreates the working repo, runs the external
    split/CSlicer/Definer tools, caches prefix repos and suffix logs, and
    inserts the per-phase execution times into the final log.
    """
    print ('Starting Example :' + example)
    # start counting the exec time
    start_time = time.time()
    start, end, repo_name, test_suite, repo_path, lines, config_file = \
                            extractInfoFromCSlicerConfigs(example)
    # remove the old repo in _downloads dir
    if os.path.isdir(repo_path):
        print ('remove old repo')
        time.sleep(30)
        shutil.rmtree(repo_path, ignore_errors=True)
    # check if cache is disabled
    if not share_prefix:
        is_run_from_cache = False
        shutil.copytree(repo_path + '-cache', repo_path)
        goto .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # copy the cached repo if exist (deepest prefix first)
    if isPrefixRepoCached(example, 'split-cslicer-definer'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'split-cslicer-definer', \
                        repo_path)
    elif isPrefixRepoCached(example, 'split-cslicer'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'split-cslicer', repo_path)
    elif isPrefixRepoCached(example, 'split'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'split', repo_path)
    else:
        # no cache, copy a new repo
        shutil.copytree(repo_path + '-cache', repo_path)
    # jump past the phases already covered by the cached prefix; seed the
    # phase-end timestamp that the next phase subtracts from.
    if isPrefixRepoCached(example, 'split-cslicer-definer'):
        is_run_from_cache = True
        definer_end_time = start_time
        split_exec_time = 'NOT RUN'
        cslicer_exec_time = 'NOT RUN'
        definer_exec_time = 'NOT RUN'
        goto .definer2
    elif isPrefixRepoCached(example, 'split-cslicer'):
        is_run_from_cache = True
        cslicer_end_time = start_time
        split_exec_time = 'NOT RUN'
        cslicer_exec_time = 'NOT RUN'
        goto .definer
    elif isPrefixRepoCached(example, 'split'):
        is_run_from_cache = True
        split_end_time = start_time
        split_exec_time = 'NOT RUN'
        goto .cslicer
    else: # cache not exist
        is_run_from_cache = False
        goto .split
    label .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('split-cslicer-definer-definer', example):
            is_match, matched_config = isStateMatch(None, 'split-cslicer-definer-definer', \
                                                    example, start=start, end=end, \
                                                    repo_path=repo_path)
            if is_match:
                is_suffix_skipped = True
                split_exec_time = 'NOT RUN'
                cslicer_exec_time = 'NOT RUN'
                definer_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # suffix cache: cache initial state, using full suffix
        orig_history = getOriginalHistory(start, end, repo_path)
        orig_history_file = orig_history_dir + '/' + example + '.hist'
        fw = open(orig_history_file, 'w')
        fw.write('\n'.join(orig_history))
        fw.close()
        cacheSuffixIfNotAlreadyCached(example, config='split-cslicer-definer-definer', \
                                      suffix='split-cslicer-definer-definer', \
                                      log_file=orig_history_file)
    label .split
    # -------------------------------- split start -------------------------------------
    # split commits by file
    splitCommitsByFile(example, repo_path, start, end, 'after-split')
    # cache intermediate repo after split (S)
    cachePrefixRepoIfNotAlreadyCached(example, 'split', repo_path)
    # generate split log file
    split_log = genSplitLogFile(example, config='split-cslicer-definer-definer', start=start, \
                                repo_path=repo_path, branch='after-split')
    split_end_time = time.time()
    split_exec_time = split_end_time - start_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('cslicer-definer-definer', example):
            is_match, matched_config = isStateMatch(split_log, 'cslicer-definer-definer', \
                                                    example)
            if is_match:
                is_suffix_skipped = True
                cslicer_exec_time = 'NOT RUN'
                definer_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='split-cslicer-definer-definer', \
                                      suffix='cslicer-definer-definer', log_file=split_log)
    countChangedLines(split_log, repo_path, 'split')
    # -------------------------------- split end -------------------------------------
    label .cslicer
    # -------------------------------- cslicer start -------------------------------------
    if is_run_from_cache:
        end = getEndSHAFrombranch(example, config='split', branch='after-split')
    # generate new config files for splitted history
    split_config_file = genSplittedConfigFile(example, repo_path, lines, configs_dir, \
                                              'after-split')
    # run tests at end commit, generate jacoco files
    runTestsGenJacoco(example, end, repo_path, test_suite)
    # stash changes on pom
    sub.run('git stash', shell=True)
    # run cslicer on splitted history, save logs
    cslicer_split_log = output_dir + '/' + example + '.log.phase1'
    runCSlicerTool(cslicer_split_log, split_config_file, 'after-split')
    # cherry-pick history slice to a new branch, reset start and end
    cslicer_history_slice, commit_msg_list = \
                        extractHistorySliceFromCSlicerLog(cslicer_split_log)
    # for NET-525, NET-527 (how to do in split level?)
    # if example == 'NET-525' or example == 'NET-527':
    #     cslicer_history_slice.append('4379a681')
    #     commit_msg_list.append('Cut-n-paste bug')
    end = applyHistorySlice(repo_path, start, cslicer_history_slice, commit_msg_list, \
                            'after-split-cslicer')
    cachePrefixRepoIfNotAlreadyCached(example, 'split-cslicer', repo_path)
    cslicer_end_time = time.time()
    cslicer_exec_time = cslicer_end_time - split_end_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('definer-definer', example):
            is_match, matched_config = isStateMatch(cslicer_split_log, 'definer-definer', \
                                                    example)
            if is_match:
                is_suffix_skipped = True
                definer_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='split-cslicer-definer-definer', \
                                      suffix='definer-definer', log_file=cslicer_split_log)
    countChangedLines(cslicer_split_log, repo_path, 'cslicer')
    # -------------------------------- cslicer end -------------------------------------
    label .definer
    # -------------------------------- definer start -------------------------------------
    if is_run_from_cache:
        end = getEndSHAFrombranch(example, config='split-cslicer', branch='after-split-cslicer')
    # temp definer config file
    definer_config_file = updateDefinerConfig(example, end, configs_dir)
    definer_log = output_dir + '/' + example + '.log.phase2'
    # checkout to original end commit and run the tests
    _, end, _, _, test_suite, _, _, _ = extractInfoFromDefinerConfigs(example)
    os.chdir(repo_path)
    # --------------
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    runDefinerTool(definer_log, definer_config_file, 'after-split-cslicer')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # --------------
    # cherry-pick history slice to a new branch, reset start and end
    history_slice, commit_msg_list = extractHistorySliceFromDefinerLog(definer_log)
    if len(history_slice) == 0 and len(commit_msg_list) == 0:
        print ('Definer times out!')
        definer_exec_time = 'TIME OUT'
        definer2_exec_time = 'NOT RUN'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, history_slice, commit_msg_list, \
                            'after-split-cslicer-definer')
    cachePrefixRepoIfNotAlreadyCached(example, 'split-cslicer-definer', repo_path)
    definer_end_time = time.time()
    definer_exec_time = definer_end_time - cslicer_end_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('definer', example):
            is_match, matched_config = isStateMatch(definer_log, 'definer', example)
            if is_match:
                is_suffix_skipped = True
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='split-cslicer-definer-definer', \
                                      suffix='definer', log_file=definer_log)
    countChangedLines(definer_log, repo_path, 'definer')
    # -------------------------------- definer end -------------------------------------
    label .definer2
    # -------------------------------- definer2 start -------------------------------------
    if is_run_from_cache:
        end = getEndSHAFrombranch(example, config='split-cslicer-definer', \
                                  branch='after-split-cslicer-definer')
    # temp definer config file
    definer_config_file = updateDefinerConfig(example, end, configs_dir)
    definer_log = output_dir + '/' + example + '.log'
    # checkout to original end commit and run the tests
    _, end, _, _, test_suite, _, _, _ = extractInfoFromDefinerConfigs(example)
    os.chdir(repo_path)
    # --------------
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    # BUG FIX: the second definer pass must run on the branch produced by the
    # first definer pass ('after-split-cslicer-definer'), not on the stale
    # 'after-split-cslicer' branch; this mirrors the definer2 phase of the
    # sibling pipelines (e.g. runCSlicerSplitDefinerDefiner).
    runDefinerTool(definer_log, definer_config_file, 'after-split-cslicer-definer')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # --------------
    # cherry-pick history slice to a new branch, reset start and end
    history_slice, commit_msg_list = extractHistorySliceFromDefinerLog(definer_log)
    if len(history_slice) == 0 and len(commit_msg_list) == 0:
        print ('Definer times out!')
        definer2_exec_time = 'TIME OUT'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, history_slice, commit_msg_list, \
                            'after-split-cslicer-definer-definer')
    cachePrefixRepoIfNotAlreadyCached(example, 'split-cslicer-definer-definer', repo_path)
    definer2_end_time = time.time()
    definer2_exec_time = definer2_end_time - definer_end_time
    final_log = definer_log
    # -------------------------------- definer2 end -------------------------------------
    label .timeout
    label .skip_suffix
    # debug: move repo to somewhere else
    end_time = time.time()
    run_time = end_time - start_time
    time_dict = collections.OrderedDict({})
    time_dict['[Split Exec Time]'] = split_exec_time
    time_dict['[CSlicer Exec Time]'] = cslicer_exec_time
    time_dict['[Definer Exec Time]'] = definer_exec_time
    time_dict['[Definer2 Exec Time]'] = definer2_exec_time
    time_dict['[Total Exec Time]'] = run_time
    insertTimeDictinLog(final_log, time_dict)
    if not is_suffix_skipped:
        countChangedLines(final_log, repo_path, 'definer')
    #backupRepoForDebugging(example, repo_path)
    cleanTempLogs()
@with_goto
def runCSlicerSplitDefinerDefiner(example, share_prefix, share_suffix, \
                                  orig_history_dir=ORIG_HISTORY_DIR, \
                                  cached_repos_dir=CACHED_REPOS_DIR, \
                                  output_dir=CSLICER_SPLIT_DEFINER_DEFINER_OUTPUT_DIR, \
                                  configs_dir=CSLICER_SPLIT_DEFINER_DEFINER_CONFIGS_DIR):
    """Run the cslicer -> split -> definer -> definer pipeline on one example.

    Phases are chained with goto/label (python-goto).  With share_prefix the
    run resumes from the deepest cached prefix repo ('cslicer',
    'cslicer-split', or 'cslicer-split-definer'); with share_suffix the run
    stops early whenever a cached suffix result matches the current state
    and copies that result into the final log instead of recomputing.

    Args:
        example: example id used to locate configs, repos, and log files.
        share_prefix: when True, reuse cached intermediate repos.
        share_suffix: when True, reuse cached slicing results on a state
            match and skip the remaining phases.
        orig_history_dir: directory holding original-history .hist files.
        cached_repos_dir: root directory of the cached intermediate repos.
        output_dir: directory for the final and per-phase logs.
        configs_dir: directory where generated config files are written.

    Side effects: deletes and recreates the working repo, runs the external
    CSlicer/split/Definer tools, caches prefix repos and suffix logs, and
    inserts the per-phase execution times into the final log.
    """
    print ('Starting Example :' + example)
    # start counting the exec time
    start_time = time.time()
    start, end, repo_name, test_suite, repo_path, lines, config_file = \
                            extractInfoFromCSlicerConfigs(example)
    # remove the old repo in _downloads dir
    if os.path.isdir(repo_path):
        print ('remove old repo')
        time.sleep(30)
        shutil.rmtree(repo_path, ignore_errors=True)
    # check if cache is disabled
    if not share_prefix:
        is_run_from_cache = False
        shutil.copytree(repo_path + '-cache', repo_path)
        goto .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # copy the cached repo if exist (deepest prefix first)
    if isPrefixRepoCached(example, 'cslicer-split-definer'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'cslicer-split-definer', \
                        repo_path)
    elif isPrefixRepoCached(example, 'cslicer-split'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'cslicer-split', repo_path)
    elif isPrefixRepoCached(example, 'cslicer'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'cslicer', repo_path)
    else:
        # no cache, copy a new repo
        shutil.copytree(repo_path + '-cache', repo_path)
    # jump past the phases already covered by the cached prefix; seed the
    # phase-end timestamp that the next phase subtracts from.
    if isPrefixRepoCached(example, 'cslicer-split-definer'):
        is_run_from_cache =True
        definer_end_time = start_time
        cslicer_exec_time = 'NOT RUN'
        split_exec_time = 'NOT RUN'
        definer_exec_time = 'NOT RUN'
        goto .definer2
    elif isPrefixRepoCached(example, 'cslicer-split'):
        is_run_from_cache =True
        split_end_time = start_time
        cslicer_exec_time = 'NOT RUN'
        split_exec_time = 'NOT RUN'
        goto .definer
    elif isPrefixRepoCached(example, 'cslicer'):
        is_run_from_cache =True
        cslicer_end_time = start_time
        cslicer_exec_time = 'NOT RUN'
        goto .split
    else: # cache not exist
        is_run_from_cache = False
        goto .cslicer
    label .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('cslicer-split-definer-definer', example):
            is_match, matched_config = isStateMatch(None, 'cslicer-split-definer-definer', \
                                                    example, start=start, end=end, \
                                                    repo_path=repo_path)
            if is_match:
                is_suffix_skipped = True
                cslicer_exec_time = 'NOT RUN'
                split_exec_time = 'NOT RUN'
                definer_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # suffix cache: cache initial state, using full suffix
        orig_history = getOriginalHistory(start, end, repo_path)
        orig_history_file = orig_history_dir + '/' + example + '.hist'
        fw = open(orig_history_file, 'w')
        fw.write('\n'.join(orig_history))
        fw.close()
        cacheSuffixIfNotAlreadyCached(example, config='cslicer-split-definer-definer', \
                                      suffix='cslicer-split-definer-definer', \
                                      log_file=orig_history_file)
    label .cslicer
    # -------------------------------- cslicer start -------------------------------------
    # run tests at end commit, generate jacoco files
    runTestsGenJacoco(example, end, repo_path, test_suite)
    # stash changes on pom
    sub.run('git stash', shell=True)
    # run cslicer on original history, save temp logs
    cslicer_temp_log = output_dir + '/' + example + '.log.phase1'
    runCSlicerTool(cslicer_temp_log, config_file, 'orig')
    # cherry-pick history slice to a new branch, reset start and end
    cslicer_history_slice, commit_msg_list = \
                        extractHistorySliceFromCSlicerLog(cslicer_temp_log)
    # for NET-525, NET-527
    if example == 'NET-525' or example == 'NET-527':
        cslicer_history_slice.append('4379a681')
        commit_msg_list.append('Cut-n-paste bug')
    end = applyHistorySlice(repo_path, start, cslicer_history_slice, commit_msg_list, \
                            'after-cslicer')
    # cache intermediate repo after cslicer (C)
    cachePrefixRepoIfNotAlreadyCached(example, 'cslicer', repo_path)
    cslicer_end_time = time.time()
    cslicer_exec_time = cslicer_end_time - start_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('split-definer-definer', example):
            is_match, matched_config = isStateMatch(cslicer_temp_log, 'split-definer-definer', \
                                                    example)
            if is_match:
                is_suffix_skipped = True
                split_exec_time = 'NOT RUN'
                definer_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='cslicer-split-definer-definer', \
                                      suffix='split-definer-definer', log_file=cslicer_temp_log)
    countChangedLines(cslicer_temp_log, repo_path, 'cslicer')
    # -------------------------------- cslicer end -------------------------------------
    label .split
    # -------------------------------- split start -------------------------------------
    if is_run_from_cache:
        end = getEndSHAFrombranch(example, config='cslicer', branch='after-cslicer')
    # split commits by file
    splitCommitsByFile(example, repo_path, start, end, 'after-cslicer-split')
    # cache intermediate repo after cslicer-split (CS)
    cachePrefixRepoIfNotAlreadyCached(example, 'cslicer-split', repo_path)
    # generate split log file
    split_log = genSplitLogFile(example, config='cslicer-split-definer-definer', start=start, \
                                repo_path=repo_path, branch='after-cslicer-split')
    split_end_time = time.time()
    split_exec_time = split_end_time - cslicer_end_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('definer-definer', example):
            is_match, matched_config = isStateMatch(split_log, 'definer-definer', example)
            if is_match:
                is_suffix_skipped = True
                definer_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='cslicer-split-definer-definer', \
                                      suffix='definer-definer', log_file=split_log)
    countChangedLines(split_log, repo_path, 'split')
    # -------------------------------- split end -------------------------------------
    label .definer
    # -------------------------------- definer start -------------------------------------
    # generate new config files for splitted history
    _, end, _, _, test_suite, _, lines, _ = extractInfoFromDefinerConfigs(example)
    split_config_file = genSplittedConfigFile(example, repo_path, lines, configs_dir, \
                                              'after-cslicer-split')
    # run definer on splitted history, save logs
    definer_log = output_dir + '/' + example + '.log.phase2'
    # checkout to end commit and run the tests
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    runDefinerTool(definer_log, split_config_file, 'after-cslicer-split')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # cherry-pick history slice to a new branch
    definer_history_slice, commit_msg_list = extractHistorySliceFromDefinerLog(definer_log)
    if len(definer_history_slice) == 0 and len(commit_msg_list) == 0:
        print ('Definer times out!')
        definer_exec_time = 'TIME OUT'
        definer2_exec_time = 'NOT RUN'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, definer_history_slice, commit_msg_list, \
                            'after-cslicer-split-definer')
    # cache intermediate repo after cslicer-split-definer (CSD)
    cachePrefixRepoIfNotAlreadyCached(example, 'cslicer-split-definer', repo_path)
    definer_end_time = time.time()
    definer_exec_time = definer_end_time - split_end_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('definer', example):
            is_match, matched_config = isStateMatch(definer_log, 'definer', example)
            if is_match:
                is_suffix_skipped = True
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='cslicer-split-definer-definer', \
                                      suffix='definer', log_file=definer_log)
    countChangedLines(definer_log, repo_path, 'definer')
    # -------------------------------- definer end -------------------------------------
    label .definer2
    # -------------------------------- definer2 start -------------------------------------
    if is_run_from_cache:
        end = getEndSHAFrombranch(example, config='cslicer-split-definer', \
                                  branch='after-cslicer-split-definer')
    # temp definer config file
    definer_config_file = updateDefinerConfig(example, end, configs_dir)
    definer_log = output_dir + '/' + example + '.log'
    # checkout to original end commit and run the tests
    _, end, _, _, test_suite, _, _, _ = extractInfoFromDefinerConfigs(example)
    os.chdir(repo_path)
    # --------------
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    runDefinerTool(definer_log, definer_config_file, 'after-cslicer-split-definer')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # --------------
    # cherry-pick history slice to a new branch
    history_slice, commit_msg_list = extractHistorySliceFromDefinerLog(definer_log)
    if len(history_slice) == 0 and len(commit_msg_list) == 0:
        print ('Definer times out!')
        definer2_exec_time = 'TIME OUT'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, history_slice, commit_msg_list, \
                            'after-cslicer-split-definer-definer')
    cachePrefixRepoIfNotAlreadyCached(example, 'cslicer-split-definer-definer', repo_path)
    definer2_end_time = time.time()
    definer2_exec_time = definer2_end_time - definer_end_time
    final_log = definer_log
    # -------------------------------- definer2 end -------------------------------------
    label .timeout
    label .skip_suffix
    # debug: move repo to somewhere else
    end_time = time.time()
    run_time = end_time - start_time
    time_dict = collections.OrderedDict({})
    time_dict['[CSlicer Exec Time]'] = cslicer_exec_time
    time_dict['[Split Exec Time]'] = split_exec_time
    time_dict['[Definer Exec Time]'] = definer_exec_time
    time_dict['[Definer2 Exec Time]'] = definer2_exec_time
    time_dict['[Total Exec Time]'] = run_time
    insertTimeDictinLog(final_log, time_dict)
    if not is_suffix_skipped:
        countChangedLines(final_log, repo_path, 'definer')
    #backupRepoForDebugging(example, repo_path)
    # clean temp logs
    cleanTempLogs()
@with_goto
def runDefinerSplitCSlicerDefiner(example, share_prefix, share_suffix, \
                                  orig_history_dir=ORIG_HISTORY_DIR, \
                                  cached_repos_dir=CACHED_REPOS_DIR, \
                                  output_dir=DEFINER_SPLIT_CSLICER_DEFINER_OUTPUT_DIR, \
                                  configs_dir=DEFINER_SPLIT_CSLICER_DEFINER_CONFIGS_DIR):
    """Run the definer -> split -> cslicer -> definer pipeline on one example.

    Phases are chained with goto/label (python-goto).  With share_prefix the
    run resumes from the deepest cached prefix repo ('definer',
    'definer-split', or 'definer-split-cslicer'); with share_suffix the run
    stops early whenever a cached suffix result matches the current state
    and copies that result into the final log instead of recomputing.

    Args:
        example: example id used to locate configs, repos, and log files.
        share_prefix: when True, reuse cached intermediate repos.
        share_suffix: when True, reuse cached slicing results on a state
            match and skip the remaining phases.
        orig_history_dir: directory holding original-history .hist files.
        cached_repos_dir: root directory of the cached intermediate repos.
        output_dir: directory for the final and per-phase logs.
        configs_dir: directory where generated config files are written.

    Side effects: deletes and recreates the working repo, runs the external
    Definer/split/CSlicer tools, caches prefix repos and suffix logs, and
    inserts the per-phase execution times into the final log.
    """
    print ('Starting Example :' + example)
    # start counting the exec time
    start_time = time.time()
    # extract info from config file
    start, end, repo_name, build_script_path, test_suite, repo_path, lines, config_file = \
                            extractInfoFromDefinerConfigs(example)
    # remove the old repo in _downloads dir
    if os.path.isdir(repo_path):
        print ('remove old repo')
        time.sleep(30)
        shutil.rmtree(repo_path, ignore_errors=True)
    # check if cache is disabled
    if not share_prefix:
        is_run_from_cache = False
        shutil.copytree(repo_path + '-cache', repo_path)
        goto .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # copy the cached repo if exist (deepest prefix first)
    if isPrefixRepoCached(example, 'definer-split-cslicer'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'definer-split-cslicer', \
                        repo_path)
    elif isPrefixRepoCached(example, 'definer-split'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'definer-split', repo_path)
    elif isPrefixRepoCached(example, 'definer'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'definer', repo_path)
    else:
        # no cache, copy a new repo
        shutil.copytree(repo_path + '-cache', repo_path)
    # jump past the phases already covered by the cached prefix; seed the
    # phase-end timestamp that the next phase subtracts from.
    if isPrefixRepoCached(example, 'definer-split-cslicer'):
        is_run_from_cache =True
        cslicer_end_time = start_time
        definer_exec_time = 'NOT RUN'
        split_exec_time = 'NOT RUN'
        cslicer_exec_time = 'NOT RUN'
        goto .definer2
    elif isPrefixRepoCached(example, 'definer-split'):
        is_run_from_cache =True
        split_end_time = start_time
        definer_exec_time = 'NOT RUN'
        split_exec_time = 'NOT RUN'
        goto .cslicer
    elif isPrefixRepoCached(example, 'definer'):
        is_run_from_cache =True
        definer_end_time = start_time
        definer_exec_time = 'NOT RUN'
        goto .split
    else: # cache not exist
        is_run_from_cache = False
        goto .definer
    label .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('definer-split-cslicer-definer', example):
            is_match, matched_config = isStateMatch(None, 'definer-split-cslicer-definer', \
                                                    example, start=start, end=end, \
                                                    repo_path=repo_path)
            if is_match:
                is_suffix_skipped = True
                definer_exec_time = 'NOT RUN'
                split_exec_time = 'NOT RUN'
                cslicer_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # suffix cache: cache initial state, using full suffix
        orig_history = getOriginalHistory(start, end, repo_path)
        orig_history_file = orig_history_dir + '/' + example + '.hist'
        fw = open(orig_history_file, 'w')
        fw.write('\n'.join(orig_history))
        fw.close()
        cacheSuffixIfNotAlreadyCached(example, config='definer-split-cslicer-definer', \
                                      suffix='definer-split-cslicer-definer', \
                                      log_file=orig_history_file)
    label .definer
    # -------------------------------- definer start -------------------------------------
    # checkout to end commit and run the tests
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    # run definer, save temp logs
    definer_log = output_dir + '/' + example + '.log.phase1'
    runDefinerTool(definer_log, config_file, 'definerorig')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # extract history slice from definer log
    history_slice, commit_msg_list = extractHistorySliceFromDefinerLog(definer_log)
    if len(history_slice) == 0 and len(commit_msg_list) == 0:
        print ('Definer times out!')
        definer_exec_time = 'TIME OUT'
        split_exec_time = 'NOT RUN'
        cslicer_exec_time = 'NOT RUN'
        definer2_exec_time = 'NOT RUN'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, history_slice, commit_msg_list, 'after-definer')
    # cache intermediate repo after definer (D)
    cachePrefixRepoIfNotAlreadyCached(example, 'definer', repo_path)
    definer_end_time = time.time()
    definer_exec_time = definer_end_time - start_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('split-cslicer-definer', example):
            is_match, matched_config = isStateMatch(definer_log, 'split-cslicer-definer', \
                                                    example)
            if is_match:
                is_suffix_skipped = True
                split_exec_time = 'NOT RUN'
                cslicer_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='definer-split-cslicer-definer', \
                                      suffix='split-cslicer-definer', log_file=definer_log)
    countChangedLines(definer_log, repo_path, 'definer')
    # -------------------------------- definer end -------------------------------------
    label .split
    # -------------------------------- split start -------------------------------------
    if is_run_from_cache:
        end = getEndSHAFrombranch(example, config='definer', branch='after-definer')
    # split commits by file
    splitCommitsByFile(example, repo_path, start, end, 'after-definer-split')
    # cache intermediate repo after definer-split (DS)
    cachePrefixRepoIfNotAlreadyCached(example, 'definer-split', repo_path)
    # generate split log file
    split_log = genSplitLogFile(example, config='definer-split-cslicer-definer', start=start, \
                                repo_path=repo_path, branch='after-definer-split')
    split_end_time = time.time()
    split_exec_time = split_end_time - definer_end_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('cslicer-definer', example):
            is_match, matched_config = isStateMatch(split_log, 'cslicer-definer', example)
            if is_match:
                is_suffix_skipped = True
                cslicer_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='definer-split-cslicer-definer', \
                                      suffix='cslicer-definer', log_file=split_log)
    countChangedLines(split_log, repo_path, 'split')
    # -------------------------------- split end -------------------------------------
    label .cslicer
    # -------------------------------- cslicer start -------------------------------------
    if is_run_from_cache:
        end = getEndSHAFrombranch(example, config='definer-split', branch='after-definer-split')
    # generate new config files for splitted history
    _, end, _, test_suite, _, lines, _ = extractInfoFromCSlicerConfigs(example)
    split_config_file = \
        genSplittedConfigFile(example, repo_path, lines, configs_dir, 'after-definer-split')
    # run tests at end commit, generate jacoco files
    runTestsGenJacoco(example, end, repo_path, test_suite)
    # stash changes on pom
    sub.run('git stash', shell=True)
    # run cslicer on split history, save logs
    cslicer_log = output_dir + '/' + example + '.log.phase2'
    runCSlicerTool(cslicer_log, split_config_file, 'after-definer-split')
    history_slice, commit_msg_list = extractHistorySliceFromCSlicerLog(cslicer_log)
    end = applyHistorySlice(repo_path, start, history_slice, commit_msg_list, \
                            'after-definer-split-cslicer')
    cachePrefixRepoIfNotAlreadyCached(example, 'definer-split-cslicer', repo_path)
    cslicer_end_time = time.time()
    cslicer_exec_time = cslicer_end_time - split_end_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('definer', example):
            is_match, matched_config = isStateMatch(cslicer_log, 'definer', example)
            if is_match:
                is_suffix_skipped = True
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='definer-split-cslicer-definer', \
                                      suffix='definer', log_file=cslicer_log)
    countChangedLines(cslicer_log, repo_path, 'cslicer')
    # -------------------------------- cslicer end -------------------------------------
    label .definer2
    # -------------------------------- definer2 start -------------------------------------
    if is_run_from_cache:
        end = getEndSHAFrombranch(example, config='definer-split-cslicer', \
                                  branch='after-definer-split-cslicer')
    # temp definer config file
    definer_config_file = updateDefinerConfig(example, end, configs_dir)
    definer_log = output_dir + '/' + example + '.log'
    # checkout to original end commit and run the tests
    _, end, _, _, test_suite, _, _, _ = extractInfoFromDefinerConfigs(example)
    os.chdir(repo_path)
    # --------------
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    runDefinerTool(definer_log, definer_config_file, 'after-definer-split-cslicer')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # --------------
    # cherry-pick history slice to a new branch, reset start and end
    history_slice, commit_msg_list = extractHistorySliceFromDefinerLog(definer_log)
    if len(history_slice) == 0 and len(commit_msg_list) == 0:
        print ('Definer times out!')
        definer2_exec_time = 'TIME OUT'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, history_slice, commit_msg_list, \
                            'after-definer-split-cslicer-definer')
    cachePrefixRepoIfNotAlreadyCached(example, 'definer-split-cslicer-definer', repo_path)
    definer2_end_time = time.time()
    definer2_exec_time = definer2_end_time - cslicer_end_time
    final_log = definer_log
    # -------------------------------- definer2 end -------------------------------------
    label .timeout
    label .skip_suffix
    # debug: move repo to somewhere else
    end_time = time.time()
    run_time = end_time - start_time
    time_dict = collections.OrderedDict({})
    time_dict['[Definer Exec Time]'] = definer_exec_time
    time_dict['[Split Exec Time]'] = split_exec_time
    time_dict['[CSlicer Exec Time]'] = cslicer_exec_time
    time_dict['[Definer2 Exec Time]'] = definer2_exec_time
    time_dict['[Total Exec Time]'] = run_time
    insertTimeDictinLog(final_log, time_dict)
    if not is_suffix_skipped:
        countChangedLines(final_log, repo_path, 'definer')
    #backupRepoForDebugging(example, repo_path)
    # clean temp logs
    cleanTempLogs()
@with_goto
def runDefinerCSlicerSplitDefiner(example, share_prefix, share_suffix, \
                                  orig_history_dir=ORIG_HISTORY_DIR, \
                                  cached_repos_dir=CACHED_REPOS_DIR, \
                                  output_dir=DEFINER_CSLICER_SPLIT_DEFINER_OUTPUT_DIR, \
                                  configs_dir=DEFINER_CSLICER_SPLIT_DEFINER_CONFIGS_DIR):
    """Run the Definer -> CSlicer -> Split -> Definer (DCSD) tool chain on one example.

    The four phases are wired together with goto/label jumps so that a run can
    (a) resume from a cached intermediate repo when ``share_prefix`` is enabled
    (jumping straight to the first phase not covered by the cached prefix), and
    (b) stop early when ``share_suffix`` is enabled and a previously computed
    suffix state from another config matches.  Per-phase and total execution
    times are recorded into the final log.

    :param example: example id used to locate configs, logs, repos and caches
    :param share_prefix: when False, always start from a fresh repo copy
    :param share_suffix: when True, try to reuse suffix results of other configs
    :param orig_history_dir: directory holding original-history ``.hist`` files
    :param cached_repos_dir: directory holding cached intermediate repos
    :param output_dir: directory for the per-phase and final log files
    :param configs_dir: directory for the generated tool config files
    """
    print('Starting Example :' + example)
    # start counting the exec time
    start_time = time.time()
    # extract info from config file
    start, end, repo_name, build_script_path, test_suite, repo_path, lines, config_file = \
            extractInfoFromDefinerConfigs(example)
    # remove the old repo in _downloads dir
    if os.path.isdir(repo_path):
        print('remove old repo')
        time.sleep(30)
        shutil.rmtree(repo_path, ignore_errors=True)
    # check if cache is disabled
    if not share_prefix:
        is_run_from_cache = False
        shutil.copytree(repo_path + '-cache', repo_path)
        goto .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # copy the cached repo if exist (longest cached prefix wins)
    if isPrefixRepoCached(example, 'definer-cslicer-split'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'definer-cslicer-split', \
                        repo_path)
    elif isPrefixRepoCached(example, 'definer-cslicer'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'definer-cslicer', repo_path)
    elif isPrefixRepoCached(example, 'definer'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'definer', repo_path)
    else:
        # no cache, copy a new repo
        shutil.copytree(repo_path + '-cache', repo_path)
    # jump to the first phase not already covered by the cached prefix;
    # skipped phases report 'NOT RUN' and their end time is pinned to start_time
    if isPrefixRepoCached(example, 'definer-cslicer-split'):
        is_run_from_cache = True
        split_end_time = start_time
        definer_exec_time = 'NOT RUN'
        cslicer_exec_time = 'NOT RUN'
        split_exec_time = 'NOT RUN'
        goto .definer2
    elif isPrefixRepoCached(example, 'definer-cslicer'):
        is_run_from_cache = True
        cslicer_end_time = start_time
        definer_exec_time = 'NOT RUN'
        cslicer_exec_time = 'NOT RUN'
        goto .split
    elif isPrefixRepoCached(example, 'definer'):
        is_run_from_cache = True
        definer_end_time = start_time
        definer_exec_time = 'NOT RUN'
        goto .cslicer
    else: # cache not exist
        is_run_from_cache = False
        goto .definer
    label .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # check if any suffix reusable (full-suffix match on the initial state)
    if share_suffix:
        if isSuffixExist('definer-cslicer-split-definer', example):
            is_match, matched_config = isStateMatch(None, 'definer-cslicer-split-definer', \
                                                    example, start=start, end=end, \
                                                    repo_path=repo_path)
            if is_match:
                is_suffix_skipped = True
                definer_exec_time = 'NOT RUN'
                cslicer_exec_time = 'NOT RUN'
                split_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # suffix cache: cache initial state, using full suffix
        orig_history = getOriginalHistory(start, end, repo_path)
        orig_history_file = orig_history_dir + '/' + example + '.hist'
        with open(orig_history_file, 'w') as fw:
            fw.write('\n'.join(orig_history))
        cacheSuffixIfNotAlreadyCached(example, config='definer-cslicer-split-definer', \
                                      suffix='definer-cslicer-split-definer', \
                                      log_file=orig_history_file)
    label .definer
    # -------------------------------- definer start -------------------------------------
    # checkout to end commit and run the tests
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    # run definer, save temp logs
    definer_log = output_dir + '/' + example + '.log.phase1'
    runDefinerTool(definer_log, config_file, 'definerorig')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # extract history slice from definer log; an empty slice means definer timed out
    history_slice, commit_msg_list = extractHistorySliceFromDefinerLog(definer_log)
    if len(history_slice) == 0 and len(commit_msg_list) == 0:
        print('Definer times out!')
        definer_exec_time = 'TIME OUT'
        cslicer_exec_time = 'NOT RUN'
        split_exec_time = 'NOT RUN'
        definer2_exec_time = 'NOT RUN'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, history_slice, commit_msg_list, 'after-definer')
    # cache intermediate repo after definer (D)
    cachePrefixRepoIfNotAlreadyCached(example, 'definer', repo_path)
    definer_end_time = time.time()
    definer_exec_time = definer_end_time - start_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('cslicer-split-definer', example):
            is_match, matched_config = isStateMatch(definer_log, 'cslicer-split-definer', \
                                                    example)
            if is_match:
                is_suffix_skipped = True
                cslicer_exec_time = 'NOT RUN'
                split_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='definer-cslicer-split-definer', \
                                      suffix='cslicer-split-definer', log_file=definer_log)
    countChangedLines(definer_log, repo_path, 'definer')
    # -------------------------------- definer end -------------------------------------
    label .cslicer
    # -------------------------------- cslicer start -------------------------------------
    if is_run_from_cache:
        # resuming from cache: recover the end SHA recorded on the phase branch
        end = getEndSHAFrombranch(example, config='definer', branch='after-definer')
    # update cslicer config
    cslicer_config_file = updateCSlicerConfig(example, end, configs_dir)
    # run tests at the original end commit, generate jacoco files
    _, end, _, test_suite, _, lines, _ = extractInfoFromCSlicerConfigs(example)
    runTestsGenJacoco(example, end, repo_path, test_suite)
    # stash changes on pom
    sub.run('git stash', shell=True)
    # run cslicer and save logs
    cslicer_log = output_dir + '/' + example + '.log.phase2'
    runCSlicerTool(cslicer_log, cslicer_config_file, 'after-definer')
    history_slice, commit_msg_list = extractHistorySliceFromCSlicerLog(cslicer_log)
    end = applyHistorySlice(repo_path, start, history_slice, commit_msg_list, \
                            'after-definer-cslicer')
    cachePrefixRepoIfNotAlreadyCached(example, 'definer-cslicer', repo_path)
    cslicer_end_time = time.time()
    cslicer_exec_time = cslicer_end_time - definer_end_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('split-definer', example):
            is_match, matched_config = isStateMatch(cslicer_log, 'split-definer', example)
            if is_match:
                is_suffix_skipped = True
                split_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='definer-cslicer-split-definer', \
                                      suffix='split-definer', log_file=cslicer_log)
    countChangedLines(cslicer_log, repo_path, 'cslicer')
    # -------------------------------- cslicer end -------------------------------------
    label .split
    # -------------------------------- split start -------------------------------------
    # split commits by file
    if is_run_from_cache:
        end = getEndSHAFrombranch(example, config='definer-cslicer', \
                                  branch='after-definer-cslicer')
    splitCommitsByFile(example, repo_path, start, end, 'after-definer-cslicer-split')
    # cache intermediate repo after definer-split (DCS)
    cachePrefixRepoIfNotAlreadyCached(example, 'definer-cslicer-split', repo_path)
    # generate split log file
    split_log = genSplitLogFile(example, config='definer-cslicer-split-definer', start=start, \
                                repo_path=repo_path, branch='after-definer-cslicer-split')
    split_end_time = time.time()
    split_exec_time = split_end_time - cslicer_end_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('definer', example):
            is_match, matched_config = isStateMatch(split_log, 'definer', example)
            if is_match:
                is_suffix_skipped = True
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='definer-cslicer-split-definer', \
                                      suffix='definer', log_file=split_log)
    countChangedLines(split_log, repo_path, 'split')
    # -------------------------------- split end -------------------------------------
    label .definer2
    # -------------------------------- definer2 start -------------------------------------
    _, end, _, _, test_suite, _, lines, _ = extractInfoFromDefinerConfigs(example)
    split_config_file = genSplittedConfigFile(example, repo_path, lines, configs_dir, \
                                              'after-definer-cslicer-split')
    definer_log = output_dir + '/' + example + '.log'
    # checkout to original end commit and run the tests
    # --------------
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    runDefinerTool(definer_log, split_config_file, 'after-definer-cslicer-split')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # --------------
    # cherry-pick history slice to a new branch, reset start and end
    history_slice, commit_msg_list = extractHistorySliceFromDefinerLog(definer_log)
    if len(history_slice) == 0 and len(commit_msg_list) == 0:
        print('Definer times out!')
        # fix: this phase did run and timed out, so report 'TIME OUT' rather than
        # 'NOT RUN' (matches the definer2 timeout handling of the sibling configs)
        definer2_exec_time = 'TIME OUT'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, history_slice, commit_msg_list, \
                            'after-definer-cslicer-split-definer')
    cachePrefixRepoIfNotAlreadyCached(example, 'definer-cslicer-split-definer', repo_path)
    definer2_end_time = time.time()
    definer2_exec_time = definer2_end_time - split_end_time
    final_log = definer_log
    # -------------------------------- definer2 end -------------------------------------
    label .timeout
    label .skip_suffix
    # debug: move repo to somewhere else
    end_time = time.time()
    run_time = end_time - start_time
    # record per-phase and total execution times into the final log
    time_dict = collections.OrderedDict({})
    time_dict['[Definer Exec Time]'] = definer_exec_time
    time_dict['[CSlicer Exec Time]'] = cslicer_exec_time
    time_dict['[Split Exec Time]'] = split_exec_time
    time_dict['[Definer2 Exec Time]'] = definer2_exec_time
    time_dict['[Total Exec Time]'] = run_time
    insertTimeDictinLog(final_log, time_dict)
    if not is_suffix_skipped:
        countChangedLines(final_log, repo_path, 'definer')
    #backupRepoForDebugging(example, repo_path)
    # clean temp logs
    cleanTempLogs()
@with_goto
def runSplitDefinerCSlicerDefiner(example, share_prefix, share_suffix, \
                                  orig_history_dir=ORIG_HISTORY_DIR, \
                                  cached_repos_dir=CACHED_REPOS_DIR, \
                                  output_dir=SPLIT_DEFINER_CSLICER_DEFINER_OUTPUT_DIR, \
                                  configs_dir=SPLIT_DEFINER_CSLICER_DEFINER_CONFIGS_DIR):
    """Run the Split -> Definer -> CSlicer -> Definer (SDCD) tool chain on one example.

    Phases are chained with goto/label jumps so the run can resume from a
    cached intermediate repo (prefix sharing) or stop early when a previously
    computed suffix state from another config matches (suffix sharing).
    Per-phase and total execution times are recorded into the final log.

    :param example: example id used to locate configs, logs, repos and caches
    :param share_prefix: when False, always start from a fresh repo copy
    :param share_suffix: when True, try to reuse suffix results of other configs
    :param orig_history_dir: directory holding original-history ``.hist`` files
    :param cached_repos_dir: directory holding cached intermediate repos
    :param output_dir: directory for the per-phase and final log files
    :param configs_dir: directory for the generated tool config files
    """
    print('Starting Example :' + example)
    # start counting the exec time
    start_time = time.time()
    start, end, repo_name, build_script_path, test_suite, repo_path, lines, config_file = \
            extractInfoFromDefinerConfigs(example)
    # remove the old repo in _downloads dir
    if os.path.isdir(repo_path):
        print('remove old repo')
        time.sleep(30)
        shutil.rmtree(repo_path, ignore_errors=True)
    # check if cache is disabled
    if not share_prefix:
        is_run_from_cache = False
        shutil.copytree(repo_path + '-cache', repo_path)
        goto .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # copy the cached repo if exist (longest cached prefix wins)
    if isPrefixRepoCached(example, 'split-definer-cslicer'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'split-definer-cslicer', \
                        repo_path)
    elif isPrefixRepoCached(example, 'split-definer'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'split-definer', repo_path)
    elif isPrefixRepoCached(example, 'split'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'split', repo_path)
    else:
        # no cache, copy a new repo
        shutil.copytree(repo_path + '-cache', repo_path)
    # jump to the first phase not already covered by the cached prefix;
    # skipped phases report 'NOT RUN' and their end time is pinned to start_time
    if isPrefixRepoCached(example, 'split-definer-cslicer'):
        is_run_from_cache = True
        cslicer_end_time = start_time
        split_exec_time = 'NOT RUN'
        definer_exec_time = 'NOT RUN'
        cslicer_exec_time = 'NOT RUN'
        goto .definer2
    elif isPrefixRepoCached(example, 'split-definer'):
        is_run_from_cache = True
        definer_end_time = start_time
        split_exec_time = 'NOT RUN'
        definer_exec_time = 'NOT RUN'
        goto .cslicer
    elif isPrefixRepoCached(example, 'split'):
        is_run_from_cache = True
        split_end_time = start_time
        split_exec_time = 'NOT RUN'
        goto .definer
    else: # cache not exist
        is_run_from_cache = False
        goto .split
    label .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # check if any suffix reusable (full-suffix match on the initial state)
    if share_suffix:
        if isSuffixExist('split-definer-cslicer-definer', example):
            is_match, matched_config = isStateMatch(None, 'split-definer-cslicer-definer', \
                                                    example, start=start, end=end, \
                                                    repo_path=repo_path)
            if is_match:
                is_suffix_skipped = True
                split_exec_time = 'NOT RUN'
                definer_exec_time = 'NOT RUN'
                cslicer_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # suffix cache: cache initial state, using full suffix
        orig_history = getOriginalHistory(start, end, repo_path)
        orig_history_file = orig_history_dir + '/' + example + '.hist'
        with open(orig_history_file, 'w') as fw:
            fw.write('\n'.join(orig_history))
        cacheSuffixIfNotAlreadyCached(example, config='split-definer-cslicer-definer', \
                                      suffix='split-definer-cslicer-definer', \
                                      log_file=orig_history_file)
    label .split
    # -------------------------------- split start -------------------------------------
    # split commits by file
    splitCommitsByFile(example, repo_path, start, end, 'after-split')
    # cache intermediate repo after split (S)
    cachePrefixRepoIfNotAlreadyCached(example, 'split', repo_path)
    # generate split log file
    split_log = genSplitLogFile(example, config='split-definer-cslicer-definer', start=start, \
                                repo_path=repo_path, branch='after-split')
    split_end_time = time.time()
    split_exec_time = split_end_time - start_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('definer-cslicer-definer', example):
            is_match, matched_config = isStateMatch(split_log, 'definer-cslicer-definer', \
                                                    example)
            if is_match:
                is_suffix_skipped = True
                definer_exec_time = 'NOT RUN'
                cslicer_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='split-definer-cslicer-definer', \
                                      suffix='definer-cslicer-definer', log_file=split_log)
    countChangedLines(split_log, repo_path, 'split')
    # -------------------------------- split end -------------------------------------
    label .definer
    # -------------------------------- definer start ---------------------------------
    # generate new config files for splitted history
    split_config_file = genSplittedConfigFile(example, repo_path, lines, configs_dir, \
                                              'after-split')
    # checkout to end commit and run the tests
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    # run definer on splitted history, save logs
    definer_log = output_dir + '/' + example + '.log.phase1'
    runDefinerTool(definer_log, split_config_file, 'after-split')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # an empty slice means definer timed out
    history_slice, commit_msg_list = extractHistorySliceFromDefinerLog(definer_log)
    if len(history_slice) == 0 and len(commit_msg_list) == 0:
        print('Definer times out!')
        definer_exec_time = 'TIME OUT'
        cslicer_exec_time = 'NOT RUN'
        definer2_exec_time = 'NOT RUN'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, history_slice, commit_msg_list, \
                            'after-split-definer')
    cachePrefixRepoIfNotAlreadyCached(example, 'split-definer', repo_path)
    definer_end_time = time.time()
    definer_exec_time = definer_end_time - split_end_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('cslicer-definer', example):
            is_match, matched_config = isStateMatch(definer_log, 'cslicer-definer', example)
            if is_match:
                is_suffix_skipped = True
                cslicer_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='split-definer-cslicer-definer', \
                                      suffix='cslicer-definer', log_file=definer_log)
    countChangedLines(definer_log, repo_path, 'definer')
    # -------------------------------- definer end -------------------------------------
    label .cslicer
    # -------------------------------- cslicer start ---------------------------------
    if is_run_from_cache:
        # resuming from cache: recover the end SHA recorded on the phase branch
        end = getEndSHAFrombranch(example, config='split-definer', branch='after-split-definer')
    # update cslicer config
    cslicer_config_file = updateCSlicerConfig(example, end, configs_dir)
    # run tests at end commit, generate jacoco files
    _, end, _, test_suite, _, lines, _ = extractInfoFromCSlicerConfigs(example)
    runTestsGenJacoco(example, end, repo_path, test_suite)
    # stash changes on pom
    sub.run('git stash', shell=True)
    # run cslicer on splitted history, save logs
    cslicer_log = output_dir + '/' + example + '.log.phase2'
    runCSlicerTool(cslicer_log, cslicer_config_file, 'after-split-definer')
    # cherry-pick history slice to a new branch, reset start and end
    cslicer_history_slice, commit_msg_list = \
            extractHistorySliceFromCSlicerLog(cslicer_log)
    end = applyHistorySlice(repo_path, start, cslicer_history_slice, commit_msg_list, \
                            'after-split-definer-cslicer')
    cachePrefixRepoIfNotAlreadyCached(example, 'split-definer-cslicer', repo_path)
    cslicer_end_time = time.time()
    cslicer_exec_time = cslicer_end_time - definer_end_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('definer', example):
            is_match, matched_config = isStateMatch(cslicer_log, 'definer', example)
            if is_match:
                is_suffix_skipped = True
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='split-definer-cslicer-definer', \
                                      suffix='definer', log_file=cslicer_log)
    countChangedLines(cslicer_log, repo_path, 'cslicer')
    # -------------------------------- cslicer end -------------------------------------
    label .definer2
    # -------------------------------- definer2 start -------------------------------------
    if is_run_from_cache:
        end = getEndSHAFrombranch(example, config='split-definer-cslicer', \
                                  branch='after-split-definer-cslicer')
    # temp definer config file
    definer_config_file = updateDefinerConfig(example, end, configs_dir)
    definer_log = output_dir + '/' + example + '.log'
    _, end, _, _, test_suite, _, _, _ = extractInfoFromDefinerConfigs(example)
    # checkout to original end commit and run the tests
    # --------------
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    runDefinerTool(definer_log, definer_config_file, 'after-split-definer-cslicer')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # --------------
    # cherry-pick history slice to a new branch
    history_slice, commit_msg_list = extractHistorySliceFromDefinerLog(definer_log)
    if len(history_slice) == 0 and len(commit_msg_list) == 0:
        print('Definer times out!')
        definer2_exec_time = 'TIME OUT'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, history_slice, commit_msg_list, \
                            'after-split-definer-cslicer-definer')
    cachePrefixRepoIfNotAlreadyCached(example, 'split-definer-cslicer-definer', repo_path)
    definer2_end_time = time.time()
    definer2_exec_time = definer2_end_time - cslicer_end_time
    final_log = definer_log
    # -------------------------------- definer2 end -------------------------------------
    label .timeout
    label .skip_suffix
    # debug: move repo to somewhere else
    end_time = time.time()
    run_time = end_time - start_time
    # record per-phase and total execution times into the final log
    time_dict = collections.OrderedDict({})
    time_dict['[Split Exec Time]'] = split_exec_time
    time_dict['[Definer Exec Time]'] = definer_exec_time
    time_dict['[CSlicer Exec Time]'] = cslicer_exec_time
    time_dict['[Definer2 Exec Time]'] = definer2_exec_time
    time_dict['[Total Exec Time]'] = run_time
    insertTimeDictinLog(final_log, time_dict)
    if not is_suffix_skipped:
        countChangedLines(final_log, repo_path, 'definer')
    #backupRepoForDebugging(example, repo_path)
    # fix: clean temp logs, as every sibling config function does — otherwise the
    # .log.phase1 / .log.phase2 temp files from this run are left behind
    cleanTempLogs()
@with_goto
def runCSlicerDefinerDefiner(example, share_prefix, share_suffix, \
                             orig_history_dir=ORIG_HISTORY_DIR, \
                             cached_repos_dir=CACHED_REPOS_DIR, \
                             output_dir=CSLICER_DEFINER_DEFINER_OUTPUT_DIR, \
                             configs_dir=CSLICER_DEFINER_DEFINER_CONFIGS_DIR):
    """Run the CSlicer -> Definer -> Definer (CDD) tool chain on one example.

    The three phases are chained with goto/label jumps so that the run can
    resume from a cached intermediate repo when ``share_prefix`` is enabled,
    or stop early when ``share_suffix`` is enabled and a previously computed
    suffix state from another config matches.  Per-phase and total execution
    times are recorded into the final log.

    :param example: example id used to locate configs, logs, repos and caches
    :param share_prefix: when False, always start from a fresh repo copy
    :param share_suffix: when True, try to reuse suffix results of other configs
    :param orig_history_dir: directory holding original-history ``.hist`` files
    :param cached_repos_dir: directory holding cached intermediate repos
    :param output_dir: directory for the per-phase and final log files
    :param configs_dir: directory for the generated tool config files
    """
    print ('Starting Example :' + example)
    # start counting the exec time
    start_time = time.time()
    # remove the old repo in _downloads dir
    start, end, repo_name, test_suite, repo_path, lines, config_file = \
            extractInfoFromCSlicerConfigs(example)
    if os.path.isdir(repo_path):
        print ('remove old repo')
        time.sleep(30)
        shutil.rmtree(repo_path, ignore_errors=True)
    # check if cache is disabled
    if not share_prefix:
        is_run_from_cache = False
        shutil.copytree(repo_path + '-cache', repo_path)
        goto .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # copy the cached repo if exist (longest cached prefix wins)
    if isPrefixRepoCached(example, 'cslicer-definer'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'cslicer-definer', repo_path)
    elif isPrefixRepoCached(example, 'cslicer'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'cslicer', repo_path)
    else:
        # no cache, copy a new repo
        shutil.copytree(repo_path + '-cache', repo_path)
    # jump to the first phase not already covered by the cached prefix;
    # skipped phases report 'NOT RUN' and their end time is pinned to start_time
    if isPrefixRepoCached(example, 'cslicer-definer'):
        is_run_from_cache =True
        definer_end_time = start_time
        cslicer_exec_time = 'NOT RUN'
        definer_exec_time = 'NOT RUN'
        goto .definer2
    elif isPrefixRepoCached(example, 'cslicer'):
        is_run_from_cache =True
        cslicer_end_time = start_time
        cslicer_exec_time = 'NOT RUN'
        goto .definer
    else: # cache not exist
        is_run_from_cache = False
        goto .cslicer
    label .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # check if any suffix reusable (full-suffix match on the initial state)
    if share_suffix:
        if isSuffixExist('cslicer-definer-definer', example):
            is_match, matched_config = isStateMatch(None, 'cslicer-definer-definer', \
                                                    example, start=start, end=end, \
                                                    repo_path=repo_path)
            if is_match:
                is_suffix_skipped = True
                cslicer_exec_time = 'NOT RUN'
                definer_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # suffix cache: cache initial state, using full suffix
        orig_history = getOriginalHistory(start, end, repo_path)
        orig_history_file = orig_history_dir + '/' + example + '.hist'
        fw = open(orig_history_file, 'w')
        fw.write('\n'.join(orig_history))
        fw.close()
        cacheSuffixIfNotAlreadyCached(example, config='cslicer-definer-definer', \
                                      suffix='cslicer-definer-definer', log_file=orig_history_file)
    label .cslicer
    # -------------------------------- cslicer start -------------------------------------
    # run tests at end commit, generate jacoco files
    runTestsGenJacoco(example, end, repo_path, test_suite)
    # stash changes on pom
    sub.run('git stash', shell=True)
    cslicer_temp_log = output_dir + '/' + example + '.log.phase1'
    runCSlicerTool(cslicer_temp_log, config_file, 'orig')
    # delete orig branch
    # NOTE(review): both checkouts are attempted; presumably only one of
    # trunk/master exists per repo and the other checkout fails harmlessly
    sub.run('git checkout trunk', shell=True)
    sub.run('git checkout master', shell=True)
    sub.run('git branch -D orig', shell=True)
    # cherry-pick history slice to a new branch, reset start and end
    cslicer_history_slice, commit_msg_list = \
            extractHistorySliceFromCSlicerLog(cslicer_temp_log)
    # for NET-525, NET-527: append a commit the slicer misses for these examples
    if example == 'NET-525' or example == 'NET-527':
        cslicer_history_slice.append('4379a681')
        commit_msg_list.append('Cut-n-paste bug')
    end = applyHistorySlice(repo_path, start, cslicer_history_slice, commit_msg_list, \
                            'after-cslicer')
    # cache intermediate repo after cslicer (C)
    cachePrefixRepoIfNotAlreadyCached(example, 'cslicer', repo_path)
    cslicer_end_time = time.time()
    cslicer_exec_time = cslicer_end_time - start_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('definer-definer', example):
            is_match, matched_config = isStateMatch(cslicer_temp_log, 'definer-definer', \
                                                    example)
            if is_match:
                is_suffix_skipped = True
                definer_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='cslicer-definer-definer', \
                                      suffix='definer-definer', log_file=cslicer_temp_log)
    countChangedLines(cslicer_temp_log, repo_path, 'cslicer')
    # -------------------------------- cslicer end -------------------------------------
    label .definer
    # -------------------------------- definer start -------------------------------------
    if is_run_from_cache:
        # resuming from cache: recover the end SHA recorded on the phase branch
        end = getEndSHAFrombranch(example, config='cslicer', branch='after-cslicer')
    # temp definer config file (CZ: we may change in the future to keep all the temp files)
    definer_config_file = updateDefinerConfig(example, end, TEMP_CONFIGS_DIR)
    definer_log = output_dir + '/' + example + '.log.phase2'
    # checkout to original end commit and run the tests
    _, end, _, _, test_suite, _, _, _ = extractInfoFromDefinerConfigs(example)
    os.chdir(repo_path)
    # move all untracked test files to temp dir (for running jacoco needed)-----------
    p = sub.Popen('git ls-files --others --exclude-standard', shell=True, \
                  stdout=sub.PIPE, stderr=sub.PIPE)
    p.wait()
    # NOTE: rebinds `lines` (previously the config value) to git's output lines
    lines = p.stdout.readlines()
    for i in range(len(lines)):
        # decode and strip the trailing newline
        lines[i] = lines[i].decode("utf-8")[:-1]
        if lines[i].startswith('src/test/'):
            # recreate the file's directory structure under the temp dir
            dir_structure = '/'.join(lines[i].strip().split('/')[:-1])
            dest_dir = TEMP_FILES_DIR + '/' + dir_structure
            if os.path.isdir(dest_dir):
                shutil.rmtree(dest_dir)
            os.makedirs(dest_dir)
            shutil.move(lines[i].strip(), dest_dir)
            #os.remove(lines[i].strip())
    # -------------------------------------------------------------------------------
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    runDefinerTool(definer_log, definer_config_file, 'after-cslicer')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # extract history slice from definer log; an empty slice means definer timed out
    definer_history_slice, commit_msg_list = extractHistorySliceFromDefinerLog(definer_log)
    if len(definer_history_slice) == 0 and len(commit_msg_list) == 0:
        print ('Definer times out!')
        definer_exec_time = 'TIME OUT'
        definer2_exec_time = 'NOT RUN'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, definer_history_slice, commit_msg_list, \
                            'after-cslicer-definer')
    # cache intermediate repo after cslicer-definer (CD)
    cachePrefixRepoIfNotAlreadyCached(example, 'cslicer-definer', repo_path)
    definer_end_time = time.time()
    definer_exec_time = definer_end_time - cslicer_end_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('definer', example):
            is_match, matched_config = isStateMatch(definer_log, 'definer', example)
            if is_match:
                is_suffix_skipped = True
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='cslicer-definer-definer', \
                                      suffix='definer', log_file=definer_log)
    countChangedLines(definer_log, repo_path, 'definer')
    # -------------------------------- definer end -------------------------------------
    label .definer2
    # -------------------------------- definer2 start -------------------------------------
    if is_run_from_cache:
        end = getEndSHAFrombranch(example, config='cslicer-definer', \
                                  branch='after-cslicer-definer')
    # temp definer config file
    definer_config_file = updateDefinerConfig(example, end, configs_dir)
    definer_log = output_dir + '/' + example + '.log'
    # checkout to original end commit and run the tests
    _, end, _, _, test_suite, _, _, _ = extractInfoFromDefinerConfigs(example)
    os.chdir(repo_path)
    # --------------
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    runDefinerTool(definer_log, definer_config_file, 'after-cslicer-definer')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # --------------
    # cherry-pick history slice to a new branch, reset start and end
    history_slice, commit_msg_list = extractHistorySliceFromDefinerLog(definer_log)
    if len(history_slice) == 0 and len(commit_msg_list) == 0:
        print ('Definer times out!')
        definer2_exec_time = 'TIME OUT'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, history_slice, commit_msg_list, \
                            'after-cslicer-definer-definer')
    cachePrefixRepoIfNotAlreadyCached(example, 'cslicer-definer-definer', repo_path)
    definer2_end_time = time.time()
    definer2_exec_time = definer2_end_time - definer_end_time
    final_log = definer_log
    # -------------------------------- definer2 end -------------------------------------
    label .timeout
    label .skip_suffix
    # debug: move repo to somewhere else
    end_time = time.time()
    run_time = end_time - start_time
    # record per-phase and total execution times into the final log
    time_dict = collections.OrderedDict({})
    time_dict['[CSlicer Exec Time]'] = cslicer_exec_time
    time_dict['[Definer Exec Time]'] = definer_exec_time
    time_dict['[Definer2 Exec Time]'] = definer2_exec_time
    time_dict['[Total Exec Time]'] = run_time
    insertTimeDictinLog(final_log, time_dict)
    if not is_suffix_skipped:
        countChangedLines(final_log, repo_path, 'definer')
    #backupRepoForDebugging(example, repo_path)
    cleanTempLogs()
@with_goto
def runDefinerCSlicerDefiner(example, share_prefix, share_suffix, \
                             orig_history_dir=ORIG_HISTORY_DIR, \
                             cached_repos_dir=CACHED_REPOS_DIR, \
                             output_dir=DEFINER_CSLICER_DEFINER_OUTPUT_DIR, \
                             configs_dir=DEFINER_CSLICER_DEFINER_CONFIGS_DIR):
    """Run the Definer -> CSlicer -> Definer (D-C-D) pipeline on one example.

    Phases: (1) Definer on the original history, (2) CSlicer on the
    Definer-reduced history, (3) Definer again on the CSlicer-reduced
    history.  When ``share_prefix`` is True, intermediate repos cached from
    earlier runs ('definer', 'definer-cslicer') let the run jump straight to
    a later phase; when ``share_suffix`` is True, a previously cached suffix
    whose state matches can short-circuit the remaining phases entirely.
    Control transfers between phases use the ``goto``/``label`` statements
    enabled by the @with_goto bytecode decorator.

    Args:
        example: example name; config, log, and cache entries are keyed on it.
        share_prefix: if False, skip all prefix-cache reuse (fresh repo copy).
        share_suffix: if True, try to reuse cached suffixes and cache new ones.
        orig_history_dir: dir where the original commit history is written.
        cached_repos_dir: root dir of the prefix-cached repositories.
        output_dir: dir where per-example logs are written.
        configs_dir: dir where generated tool config files are written.

    Side effects: deletes and recreates the working repo, runs external
    tools via helpers, writes per-phase logs, and inserts the execution-time
    table into the final log.  Returns None.
    """
    print ('Starting Example :' + example)
    # start counting the exec time
    start_time = time.time()
    start, end, repo_name, build_script_path, test_suite, repo_path, lines, config_file = \
        extractInfoFromDefinerConfigs(example)
    # remove the old repo in _downloads dir
    if os.path.isdir(repo_path):
        print ('remove old repo')
        # NOTE(review): fixed 30s pause before rmtree — presumably lets
        # lingering file handles/locks clear; confirm it is still needed.
        time.sleep(30)
        shutil.rmtree(repo_path, ignore_errors=True)
    # check if cache is disabled
    if not share_prefix:
        is_run_from_cache = False
        shutil.copytree(repo_path + '-cache', repo_path)
        goto .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # copy the cached repo if exist
    if isPrefixRepoCached(example, 'definer-cslicer'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'definer-cslicer', repo_path)
    elif isPrefixRepoCached(example, 'definer'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'definer', repo_path)
    else:
        # no cache, copy a new repo
        shutil.copytree(repo_path + '-cache', repo_path)
    # jump to the first phase that still needs to run; skipped phases get
    # 'NOT RUN' exec times and the phase-end timestamp is backdated to
    # start_time so later subtractions stay well-defined
    if isPrefixRepoCached(example, 'definer-cslicer'):
        is_run_from_cache =True
        cslicer_end_time = start_time
        definer_exec_time = 'NOT RUN'
        cslicer_exec_time = 'NOT RUN'
        goto .definer2
    elif isPrefixRepoCached(example, 'definer'):
        is_run_from_cache =True
        definer_end_time = start_time
        definer_exec_time = 'NOT RUN'
        goto .cslicer
    else: # cache not exist
        is_run_from_cache = False
        goto .definer
    label .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('definer-cslicer-definer', example):
            is_match, matched_config = isStateMatch(None, 'definer-cslicer-definer', \
                                                    example, start=start, end=end, \
                                                    repo_path=repo_path)
            if is_match:
                is_suffix_skipped = True
                definer_exec_time = 'NOT RUN'
                cslicer_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # suffix cache: cache initial state, using full suffix
        orig_history = getOriginalHistory(start, end, repo_path)
        orig_history_file = orig_history_dir + '/' + example + '.hist'
        fw = open(orig_history_file, 'w')
        fw.write('\n'.join(orig_history))
        fw.close()
        cacheSuffixIfNotAlreadyCached(example, config='definer-cslicer-definer', \
                                      suffix='definer-cslicer-definer', log_file=orig_history_file)
    label .definer
    # -------------------------------- definer start -------------------------------------
    # checkout to end commit and run the tests
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    # run definer, save temp logs
    definer_log = output_dir + '/' + example + '.log.phase1'
    runDefinerTool(definer_log, config_file, 'definerorig')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # extract history slice from definer log
    history_slice, commit_msg_list = extractHistorySliceFromDefinerLog(definer_log)
    # both empty => the tool produced no slice, which signals a timeout
    if len(history_slice) == 0 and len(commit_msg_list) == 0:
        print ('Definer times out!')
        definer_exec_time = 'TIME OUT'
        cslicer_exec_time = 'NOT RUN'
        definer2_exec_time = 'NOT RUN'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, history_slice, commit_msg_list, 'after-definer')
    # cache intermediate repo after definer (D)
    cachePrefixRepoIfNotAlreadyCached(example, 'definer', repo_path)
    definer_end_time = time.time()
    definer_exec_time = definer_end_time - start_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('cslicer-definer', example):
            is_match, matched_config = isStateMatch(definer_log, 'cslicer-definer', example)
            if is_match:
                is_suffix_skipped = True
                cslicer_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='definer-cslicer-definer', \
                                      suffix='cslicer-definer', log_file=definer_log)
    countChangedLines(definer_log, repo_path, 'definer')
    # -------------------------------- definer end -------------------------------------
    label .cslicer
    # -------------------------------- cslicer start -------------------------------------
    # when resumed from a cached prefix repo, recover the end SHA of the
    # already-applied 'after-definer' branch instead of re-running definer
    if is_run_from_cache:
        end = getEndSHAFrombranch(example, config='definer', branch='after-definer')
    # update cslicer config
    cslicer_config_file = updateCSlicerConfig(example, end, configs_dir)
    # run tests at the original end commit, generate jacoco files
    _, end, _, test_suite, _, lines, _ = extractInfoFromCSlicerConfigs(example)
    runTestsGenJacoco(example, end, repo_path, test_suite)
    # stash changes on pom
    # NOTE(review): relies on the process CWD being inside the repo at this
    # point (set by an earlier helper) — confirm.
    sub.run('git stash', shell=True)
    # run cslicer and save logs
    cslicer_log = output_dir + '/' + example + '.log.phase2'
    runCSlicerTool(cslicer_log, cslicer_config_file, 'after-definer')
    history_slice, commit_msg_list = extractHistorySliceFromCSlicerLog(cslicer_log)
    end = applyHistorySlice(repo_path, start, history_slice, commit_msg_list, \
                            'after-definer-cslicer')
    cachePrefixRepoIfNotAlreadyCached(example, 'definer-cslicer', repo_path)
    cslicer_end_time = time.time()
    cslicer_exec_time = cslicer_end_time - definer_end_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('definer', example):
            is_match, matched_config = isStateMatch(cslicer_log, 'definer', example)
            if is_match:
                is_suffix_skipped = True
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='definer-cslicer-definer', \
                                      suffix='definer', log_file=cslicer_log)
    countChangedLines(cslicer_log, repo_path, 'cslicer')
    # -------------------------------- cslicer end -------------------------------------
    label .definer2
    # -------------------------------- definer2 start -------------------------------------
    if is_run_from_cache:
        end = getEndSHAFrombranch(example, config='definer-cslicer', \
                                  branch='after-definer-cslicer')
    # temp definer config file
    definer_config_file = updateDefinerConfig(example, end, configs_dir)
    definer_log = output_dir + '/' + example + '.log'
    _, end, _, _, test_suite, _, _, _ = extractInfoFromDefinerConfigs(example)
    # checkout to original end commit and run the tests
    # --------------
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    runDefinerTool(definer_log, definer_config_file, 'after-definer-cslicer')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # --------------
    # cherry-pick history slice to a new branch
    history_slice, commit_msg_list = extractHistorySliceFromDefinerLog(definer_log)
    if len(history_slice) == 0 and len(commit_msg_list) == 0:
        print ('Definer times out!')
        definer2_exec_time = 'TIME OUT'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, history_slice, commit_msg_list, \
                            'after-definer-cslicer-definer')
    cachePrefixRepoIfNotAlreadyCached(example, 'definer-cslicer-definer', repo_path)
    definer2_end_time = time.time()
    definer2_exec_time = definer2_end_time - cslicer_end_time
    final_log = definer_log
    # -------------------------------- definer2 end -------------------------------------
    label .timeout
    label .skip_suffix
    # debug: move repo to somewhere else
    end_time = time.time()
    run_time = end_time - start_time
    time_dict = collections.OrderedDict({})
    time_dict['[Definer Exec Time]'] = definer_exec_time
    time_dict['[CSlicer Exec Time]'] = cslicer_exec_time
    time_dict['[Definer2 Exec Time]'] = definer2_exec_time
    time_dict['[Total Exec Time]'] = run_time
    insertTimeDictinLog(final_log, time_dict)
    if not is_suffix_skipped:
        countChangedLines(final_log, repo_path, 'definer')
    #backupRepoForDebugging(example, repo_path)
    # clean temp logs
    cleanTempLogs()
@with_goto
def runSplitDefinerDefiner(example, share_prefix, share_suffix, \
                           orig_history_dir=ORIG_HISTORY_DIR, \
                           cached_repos_dir=CACHED_REPOS_DIR, \
                           output_dir=SPLIT_DEFINER_DEFINER_OUTPUT_DIR, \
                           configs_dir=SPLIT_DEFINER_DEFINER_CONFIGS_DIR):
    """Run the Split -> Definer -> Definer (S-D-D) pipeline on one example.

    Phases: (1) split commits by file, (2) Definer on the splitted history,
    (3) Definer again on the Definer-reduced history.  When ``share_prefix``
    is True, intermediate repos cached from earlier runs ('split',
    'split-definer') let the run jump straight to a later phase; when
    ``share_suffix`` is True, a previously cached suffix whose state matches
    can short-circuit the remaining phases entirely.  Control transfers
    between phases use the ``goto``/``label`` statements enabled by the
    @with_goto bytecode decorator.

    Args:
        example: example name; config, log, and cache entries are keyed on it.
        share_prefix: if False, skip all prefix-cache reuse (fresh repo copy).
        share_suffix: if True, try to reuse cached suffixes and cache new ones.
        orig_history_dir: dir where the original commit history is written.
        cached_repos_dir: root dir of the prefix-cached repositories.
        output_dir: dir where per-example logs are written.
        configs_dir: dir where generated tool config files are written.

    Side effects: deletes and recreates the working repo, runs external
    tools via helpers, writes per-phase logs, inserts the execution-time
    table into the final log, and removes temp logs.  Returns None.
    """
    print ('Starting Example :' + example)
    # start counting the exec time
    start_time = time.time()
    start, end, repo_name, build_script_path, test_suite, repo_path, lines, config_file = \
        extractInfoFromDefinerConfigs(example)
    # remove the old repo in _downloads dir
    if os.path.isdir(repo_path):
        print ('remove old repo')
        # NOTE(review): fixed 30s pause before rmtree — presumably lets
        # lingering file handles/locks clear; confirm it is still needed.
        time.sleep(30)
        shutil.rmtree(repo_path, ignore_errors=True)
    # check if cache is disabled
    if not share_prefix:
        is_run_from_cache = False
        shutil.copytree(repo_path + '-cache', repo_path)
        goto .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # copy the cached repo if exist
    if isPrefixRepoCached(example, 'split-definer'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'split-definer', repo_path)
    elif isPrefixRepoCached(example, 'split'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'split', repo_path)
    else:
        # no cache, copy a new repo
        shutil.copytree(repo_path + '-cache', repo_path)
    # jump to the first phase that still needs to run; skipped phases get
    # 'NOT RUN' exec times and the phase-end timestamp is backdated to
    # start_time so later subtractions stay well-defined
    if isPrefixRepoCached(example, 'split-definer'):
        is_run_from_cache =True
        definer_end_time = start_time
        definer_exec_time = 'NOT RUN'
        split_exec_time = 'NOT RUN'
        goto .definer2
    elif isPrefixRepoCached(example, 'split'):
        is_run_from_cache =True
        split_end_time = start_time
        split_exec_time = 'NOT RUN'
        goto .definer
    else: # cache not exist
        is_run_from_cache = False
        goto .split
    label .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('split-definer-definer', example):
            is_match, matched_config = isStateMatch(None, 'split-definer-definer', \
                                                    example, start=start, end=end, \
                                                    repo_path=repo_path)
            if is_match:
                is_suffix_skipped = True
                split_exec_time = 'NOT RUN'
                definer_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # suffix cache: cache initial state, using full suffix
        orig_history = getOriginalHistory(start, end, repo_path)
        orig_history_file = orig_history_dir + '/' + example + '.hist'
        fw = open(orig_history_file, 'w')
        fw.write('\n'.join(orig_history))
        fw.close()
        cacheSuffixIfNotAlreadyCached(example, config='split-definer-definer', \
                                      suffix='split-definer-definer', log_file=orig_history_file)
    label .split
    # -------------------------------- split start -------------------------------------
    # split commits by file
    splitCommitsByFile(example, repo_path, start, end, 'after-split')
    # cache intermediate repo after split (S)
    cachePrefixRepoIfNotAlreadyCached(example, 'split', repo_path)
    # generate split log file
    split_log = genSplitLogFile(example, config='split-definer-definer', start=start, \
                                repo_path=repo_path, branch='after-split')
    split_end_time = time.time()
    split_exec_time = split_end_time - start_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('definer-definer', example):
            is_match, matched_config = isStateMatch(split_log, 'definer-definer', example)
            if is_match:
                is_suffix_skipped = True
                definer_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='split-definer-definer', \
                                      suffix='definer-definer', log_file=split_log)
    countChangedLines(split_log, repo_path, 'split')
    # -------------------------------- split end -------------------------------------
    label .definer
    # -------------------------------- definer start -------------------------------------
    # generate new config files for splitted history
    _, end, _, _, test_suite, _, lines, _ = extractInfoFromDefinerConfigs(example)
    split_config_file = genSplittedConfigFile(example, repo_path, lines, configs_dir, \
                                              'after-split')
    # checkout to end commit and run the tests
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    # run definer on splitted history, save logs
    definer_log = output_dir + '/' + example + '.log.phase1'
    runDefinerTool(definer_log, split_config_file, 'after-split')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # cherry-pick history slice to a new branch
    history_slice, commit_msg_list = extractHistorySliceFromDefinerLog(definer_log)
    # both empty => the tool produced no slice, which signals a timeout
    if len(history_slice) == 0 and len(commit_msg_list) == 0:
        print ('Definer times out!')
        definer_exec_time = 'TIME OUT'
        definer2_exec_time = 'NOT RUN'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, history_slice, commit_msg_list, \
                            'after-split-definer')
    # cache intermediate repo after split-definer (SD)
    cachePrefixRepoIfNotAlreadyCached(example, 'split-definer', repo_path)
    definer_end_time = time.time()
    definer_exec_time = definer_end_time - split_end_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('definer', example):
            is_match, matched_config = isStateMatch(definer_log, 'definer', example)
            if is_match:
                is_suffix_skipped = True
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='split-definer-definer', \
                                      suffix='definer', log_file=definer_log)
    countChangedLines(definer_log, repo_path, 'definer')
    # -------------------------------- definer end -------------------------------------
    label .definer2
    # -------------------------------- definer2 start -------------------------------------
    if is_run_from_cache:
        end = getEndSHAFrombranch(example, config='split-definer', branch='after-split-definer')
    # temp definer config file
    definer_config_file = updateDefinerConfig(example, end, configs_dir)
    definer_log = output_dir + '/' + example + '.log'
    # checkout to original end commit and run the tests
    _, end, _, _, test_suite, _, _, _ = extractInfoFromDefinerConfigs(example)
    os.chdir(repo_path)
    # --------------
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    runDefinerTool(definer_log, definer_config_file, 'after-split-definer')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # --------------
    # cherry-pick history slice to a new branch
    history_slice, commit_msg_list = extractHistorySliceFromDefinerLog(definer_log)
    if len(history_slice) == 0 and len(commit_msg_list) == 0:
        print ('Definer times out!')
        definer2_exec_time = 'TIME OUT'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, history_slice, commit_msg_list, \
                            'after-split-definer-definer')
    cachePrefixRepoIfNotAlreadyCached(example, 'split-definer-definer', repo_path)
    definer2_end_time = time.time()
    definer2_exec_time = definer2_end_time - definer_end_time
    final_log = definer_log
    # -------------------------------- definer2 end -------------------------------------
    label .timeout
    label .skip_suffix
    # debug: move repo to somewhere else
    end_time = time.time()
    run_time = end_time - start_time
    time_dict = collections.OrderedDict({})
    time_dict['[Split Exec Time]'] = split_exec_time
    time_dict['[Definer Exec Time]'] = definer_exec_time
    time_dict['[Definer2 Exec Time]'] = definer2_exec_time
    time_dict['[Total Exec Time]'] = run_time
    insertTimeDictinLog(final_log, time_dict)
    if not is_suffix_skipped:
        countChangedLines(final_log, repo_path, 'definer')
    #backupRepoForDebugging(example, repo_path)
    # clean temp logs (bug fix: this pipeline creates a '.log.phase1' temp
    # log but, unlike the sibling run* pipelines, never cleaned it up)
    cleanTempLogs()
@with_goto
def runDefinerDefiner(example, share_prefix, share_suffix, \
                      orig_history_dir=ORIG_HISTORY_DIR, \
                      cached_repos_dir=CACHED_REPOS_DIR, \
                      output_dir=DEFINER_DEFINER_OUTPUT_DIR, \
                      configs_dir=DEFINER_DEFINER_CONFIGS_DIR):
    """Run the Definer -> Definer (D-D) pipeline on one example.

    Phases: (1) Definer on the original history, (2) Definer again on the
    already-reduced history.  When ``share_prefix`` is True, an intermediate
    repo cached from an earlier run ('definer') lets the run jump straight
    to the second phase; when ``share_suffix`` is True, a previously cached
    suffix whose state matches can short-circuit the remaining phases.
    Control transfers between phases use the ``goto``/``label`` statements
    enabled by the @with_goto bytecode decorator.

    Args:
        example: example name; config, log, and cache entries are keyed on it.
        share_prefix: if False, skip all prefix-cache reuse (fresh repo copy).
        share_suffix: if True, try to reuse cached suffixes and cache new ones.
        orig_history_dir: dir where the original commit history is written.
        cached_repos_dir: root dir of the prefix-cached repositories.
        output_dir: dir where per-example logs are written.
        configs_dir: dir where generated tool config files are written.

    Side effects: deletes and recreates the working repo, runs external
    tools via helpers, writes per-phase logs, inserts the execution-time
    table into the final log, and removes temp logs.  Returns None.
    """
    print ('Starting Example :' + example)
    start_time = time.time()
    # extract info from config file
    start, end, repo_name, build_script_path, test_suite, repo_path, lines, config_file = \
        extractInfoFromDefinerConfigs(example)
    # remove the old repo in _downloads dir
    if os.path.isdir(repo_path):
        print ('remove old repo')
        # NOTE(review): fixed 30s pause before rmtree — presumably lets
        # lingering file handles/locks clear; confirm it is still needed.
        time.sleep(30)
        shutil.rmtree(repo_path, ignore_errors=True)
    # check if cache is disabled
    if not share_prefix:
        is_run_from_cache = False
        shutil.copytree(repo_path + '-cache', repo_path)
        goto .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # copy the cached repo if exist
    if isPrefixRepoCached(example, 'definer'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'definer', repo_path)
    else:
        # no cache, copy a new repo
        shutil.copytree(repo_path + '-cache', repo_path)
    # jump to the first phase that still needs to run; the skipped phase gets
    # a 'NOT RUN' exec time and its end timestamp is backdated to start_time
    # so the later subtraction stays well-defined
    if isPrefixRepoCached(example, 'definer'):
        is_run_from_cache =True
        definer_end_time = start_time
        definer_exec_time = 'NOT RUN'
        goto .definer2
    else: # cache not exist
        is_run_from_cache = False
        goto .definer
    label .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('definer-definer', example):
            is_match, matched_config = isStateMatch(None, 'definer-definer', \
                                                    example, start=start, end=end, \
                                                    repo_path=repo_path)
            if is_match:
                is_suffix_skipped = True
                definer_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # suffix cache: cache initial state, using full suffix
        orig_history = getOriginalHistory(start, end, repo_path)
        orig_history_file = orig_history_dir + '/' + example + '.hist'
        fw = open(orig_history_file, 'w')
        fw.write('\n'.join(orig_history))
        fw.close()
        cacheSuffixIfNotAlreadyCached(example, config='definer-definer', \
                                      suffix='definer-definer', log_file=orig_history_file)
    label .definer
    # -------------------------------- definer start -------------------------------------
    # checkout to end commit and run the tests
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    # run definer, save temp logs
    definer_log = output_dir + '/' + example + '.log.phase1'
    runDefinerTool(definer_log, config_file, 'definerorig')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # extract history slice from definer log
    history_slice, commit_msg_list = extractHistorySliceFromDefinerLog(definer_log)
    # both empty => the tool produced no slice, which signals a timeout
    if len(history_slice) == 0 and len(commit_msg_list) == 0:
        print ('Definer times out!')
        definer_exec_time = 'TIME OUT'
        definer2_exec_time = 'NOT RUN'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, history_slice, commit_msg_list, 'after-definer')
    # cache intermediate repo after definer (D)
    cachePrefixRepoIfNotAlreadyCached(example, 'definer', repo_path)
    definer_end_time = time.time()
    definer_exec_time = definer_end_time - start_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('definer', example):
            is_match, matched_config = isStateMatch(definer_log, 'definer', example)
            if is_match:
                is_suffix_skipped = True
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
        # cache suffix:
        cacheSuffixIfNotAlreadyCached(example, config='definer-definer', suffix='definer', \
                                      log_file=definer_log)
    countChangedLines(definer_log, repo_path, 'definer')
    # -------------------------------- definer end -------------------------------------
    label .definer2
    # -------------------------------- definer2 start -------------------------------------
    # when resumed from a cached prefix repo, recover the end SHA of the
    # already-applied 'after-definer' branch instead of re-running definer
    if is_run_from_cache:
        end = getEndSHAFrombranch(example, config='definer', branch='after-definer')
    # temp definer config file
    definer_config_file = updateDefinerConfig(example, end, configs_dir)
    definer_log = output_dir + '/' + example + '.log'
    # checkout to original end commit and run the tests
    _, end, _, _, test_suite, _, _, _ = extractInfoFromDefinerConfigs(example)
    os.chdir(repo_path)
    # --------------
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    runDefinerTool(definer_log, definer_config_file, 'after-definer')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # --------------
    # cherry-pick history slice to a new branch, reset start and end
    history_slice, commit_msg_list = extractHistorySliceFromDefinerLog(definer_log)
    if len(history_slice) == 0 and len(commit_msg_list) == 0:
        print ('Definer times out!')
        definer2_exec_time = 'TIME OUT'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, history_slice, commit_msg_list, \
                            'after-definer-definer')
    cachePrefixRepoIfNotAlreadyCached(example, 'definer-definer', repo_path)
    definer2_end_time = time.time()
    definer2_exec_time = definer2_end_time - definer_end_time
    final_log = definer_log
    # -------------------------------- definer2 end -------------------------------------
    label .timeout
    label .skip_suffix
    # debug: move repo to somewhere else
    end_time = time.time()
    run_time = end_time - start_time
    time_dict = collections.OrderedDict({})
    time_dict['[Definer Exec Time]'] = definer_exec_time
    time_dict['[Definer2 Exec Time]'] = definer2_exec_time
    time_dict['[Total Exec Time]'] = run_time
    insertTimeDictinLog(final_log, time_dict)
    if not is_suffix_skipped:
        countChangedLines(final_log, repo_path, 'definer')
    #backupRepoForDebugging(example, repo_path)
    # clean temp logs
    cleanTempLogs()
@with_goto
def runCSlicerDefinerSplitCSlicerDefiner(example, share_prefix, share_suffix, \
orig_history_dir=ORIG_HISTORY_DIR, \
cached_repos_dir=CACHED_REPOS_DIR, \
output_dir=CSLICER_DEFINER_SPLIT_CSLICER_DEFINER_OUTPUT_DIR, \
configs_dir=CSLICER_DEFINER_SPLIT_CSLICER_DEFINER_CONFIGS_DIR):
print ('Starting Example :' + example)
# start counting the exec time
start_time = time.time()
# remove the old repo in _downloads dir
start, end, repo_name, test_suite, repo_path, lines, config_file = \
extractInfoFromCSlicerConfigs(example)
if os.path.isdir(repo_path):
print ('remove old repo')
time.sleep(30)
shutil.rmtree(repo_path, ignore_errors=True)
# check if cache is disabled
if not share_prefix:
is_run_from_cache = False
shutil.copytree(repo_path + '-cache', repo_path)
goto .prefix_disabled
# a label indicating whether any suffix is saved in this run
is_suffix_skipped = False
# copy the cached repo if exist
if isPrefixRepoCached(example, 'cslicer-definer-split-cslicer'):
shutil.copytree(cached_repos_dir + '/' + example + '/' + \
'cslicer-definer-split-cslicer', repo_path)
elif isPrefixRepoCached(example, 'cslicer-definer-split'):
shutil.copytree(cached_repos_dir + '/' + example + '/' + 'cslicer-definer-split', \
repo_path)
elif isPrefixRepoCached(example, 'cslicer-definer'):
shutil.copytree(cached_repos_dir + '/' + example + '/' + 'cslicer-definer', repo_path)
elif isPrefixRepoCached(example, 'cslicer'):
shutil.copytree(cached_repos_dir + '/' + example + '/' + 'cslicer', repo_path)
else:
# no cache, copy a new repo
shutil.copytree(repo_path + '-cache', repo_path)
if isPrefixRepoCached(example, 'cslicer-definer-split-cslicer'):
is_run_from_cache =True
cslicer2_end_time = start_time
cslicer_exec_time = 'NOT RUN'
definer_exec_time = 'NOT RUN'
split_exec_time = 'NOT RUN'
cslicer2_exec_time = 'NOT RUN'
goto .definer2
elif isPrefixRepoCached(example, 'cslicer-definer-split'):
is_run_from_cache =True
split_end_time = start_time
cslicer_exec_time = 'NOT RUN'
definer_exec_time = 'NOT RUN'
split_exec_time = 'NOT RUN'
goto .cslicer2
elif isPrefixRepoCached(example, 'cslicer-definer'):
is_run_from_cache =True
definer_end_time = start_time
cslicer_exec_time = 'NOT RUN'
definer_exec_time = 'NOT RUN'
goto .split
elif isPrefixRepoCached(example, 'cslicer'):
is_run_from_cache =True
cslicer_end_time = start_time
cslicer_exec_time = 'NOT RUN'
goto .definer
else: # cache not exist
is_run_from_cache = False
goto .cslicer
label .prefix_disabled
# a label indicating whether any suffix is saved in this run
is_suffix_skipped = False
# check if any suffix reusable
if share_suffix:
if isSuffixExist('cslicer-definer-split-cslicer-definer', example):
is_match, matched_config = isStateMatch(None, \
'cslicer-definer-split-cslicer-definer', \
example, start=start, end=end, \
repo_path=repo_path)
if is_match:
is_suffix_skipped = True
cslicer_exec_time = 'NOT RUN'
definer_exec_time = 'NOT RUN'
split_exec_time = 'NOT RUN'
cslicer2_exec_time = 'NOT RUN'
definer2_exec_time = 'NOT RUN'
# find out saved by which config, then copy the slice and time of that config.
final_log = output_dir + '/' + example + '.log'
copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
goto .skip_suffix
# suffix cache: cache initial state, using full suffix
orig_history = getOriginalHistory(start, end, repo_path)
orig_history_file = orig_history_dir + '/' + example + '.hist'
fw = open(orig_history_file, 'w')
fw.write('\n'.join(orig_history))
fw.close()
cacheSuffixIfNotAlreadyCached(example, config='cslicer-definer-split-cslicer-definer', \
suffix='cslicer-definer-split-cslicer-definer', \
log_file=orig_history_file)
label .cslicer
# -------------------------------- cslicer start -------------------------------------
# run tests at end commit, generate jacoco files
runTestsGenJacoco(example, end, repo_path, test_suite)
# stash changes on pom
sub.run('git stash', shell=True)
# copy target dir because we will run CSlicer again later
# cache target dir, otherwise we cannot run CSlicer2 in the middle
cacheTargetDirForCSlicer2(example, repo_path)
cslicer_temp_log = output_dir + '/' + example + '.log.phase1'
runCSlicerTool(cslicer_temp_log, config_file, 'orig')
# delete orig branch
sub.run('git checkout trunk', shell=True)
sub.run('git checkout master', shell=True)
sub.run('git branch -D orig', shell=True)
# cherry-pick history slice to a new branch, reset start and end
cslicer_history_slice, commit_msg_list = \
extractHistorySliceFromCSlicerLog(cslicer_temp_log)
# for NET-525, NET-527
if example == 'NET-525' or example == 'NET-527':
cslicer_history_slice.append('4379a681')
commit_msg_list.append('Cut-n-paste bug')
end = applyHistorySlice(repo_path, start, cslicer_history_slice, commit_msg_list, \
'after-cslicer')
# cache intermediate repo after cslicer (C)
cachePrefixRepoIfNotAlreadyCached(example, 'cslicer', repo_path)
cslicer_end_time = time.time()
cslicer_exec_time = cslicer_end_time - start_time
# check if any suffix reusable
if share_suffix:
if isSuffixExist('definer-split-cslicer-definer', example):
is_match, matched_config = isStateMatch(cslicer_temp_log, \
'definer-split-cslicer-definer', example)
if is_match:
is_suffix_skipped = True
definer_exec_time = 'NOT RUN'
split_exec_time = 'NOT RUN'
cslicer2_exec_time = 'NOT RUN'
definer2_exec_time = 'NOT RUN'
# find out saved by which config, then copy the slice and time of that config.
final_log = output_dir + '/' + example + '.log'
copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
goto .skip_suffix
# cache suffix:
cacheSuffixIfNotAlreadyCached(example, config='cslicer-definer-split-cslicer-definer', \
suffix='definer-split-cslicer-definer', log_file=cslicer_temp_log)
countChangedLines(cslicer_temp_log, repo_path, 'cslicer')
# -------------------------------- cslicer end -------------------------------------
label .definer
# -------------------------------- definer start -------------------------------------
if is_run_from_cache:
end = getEndSHAFrombranch(example, config='cslicer', branch='after-cslicer')
# temp definer config file (CZ: we may change in the future to keep all the temp files)
definer_config_file = updateDefinerConfig(example, end, TEMP_CONFIGS_DIR)
definer_log = output_dir + '/' + example + '.log.phase2'
# checkout to original end commit and run the tests
_, end, _, _, test_suite, _, _, _ = extractInfoFromDefinerConfigs(example)
os.chdir(repo_path)
# move all untracked test files to temp dir (for running jacoco needed)-----------
p = sub.Popen('git ls-files --others --exclude-standard', shell=True, \
stdout=sub.PIPE, stderr=sub.PIPE)
p.wait()
lines = p.stdout.readlines()
for i in range(len(lines)):
lines[i] = lines[i].decode("utf-8")[:-1]
if lines[i].startswith('src/test/'):
dir_structure = '/'.join(lines[i].strip().split('/')[:-1])
dest_dir = TEMP_FILES_DIR + '/' + dir_structure
if os.path.isdir(dest_dir):
shutil.rmtree(dest_dir)
os.makedirs(dest_dir)
shutil.move(lines[i].strip(), dest_dir)
#os.remove(lines[i].strip())
# -------------------------------------------------------------------------------
runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
runDefinerTool(definer_log, definer_config_file, 'after-cslicer')
cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
# extract history slice from definer log
definer_history_slice, commit_msg_list = extractHistorySliceFromDefinerLog(definer_log)
if len(definer_history_slice) == 0 and len(commit_msg_list) == 0:
print ('Definer times out!')
definer_exec_time = 'TIME OUT'
split_exec_time = 'NOT RUN'
cslicer2_exec_time = 'NOT RUN'
definer2_exec_time = 'NOT RUN'
final_log = definer_log
goto .timeout
end = applyHistorySlice(repo_path, start, definer_history_slice, commit_msg_list, \
'after-cslicer-definer')
# cache intermediate repo after cslicer-definer (CD)
cachePrefixRepoIfNotAlreadyCached(example, 'cslicer-definer', repo_path)
definer_end_time = time.time()
definer_exec_time = definer_end_time - cslicer_end_time
# check if any suffix reusable
if share_suffix:
if isSuffixExist('split-cslicer-definer', example):
is_match, matched_config = isStateMatch(definer_log, 'split-cslicer-definer', \
example)
if is_match:
is_suffix_skipped = True
split_exec_time = 'NOT RUN'
cslicer2_exec_time = 'NOT RUN'
definer2_exec_time = 'NOT RUN'
# find out saved by which config, then copy the slice and time of that config.
final_log = output_dir + '/' + example + '.log'
copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
goto .skip_suffix
# cache suffix:
cacheSuffixIfNotAlreadyCached(example, config='cslicer-definer-split-cslicer-definer', \
suffix='split-cslicer-definer', log_file=definer_log)
countChangedLines(definer_log, repo_path, 'definer')
# -------------------------------- definer end -------------------------------------
label .split
# -------------------------------- split start -------------------------------------
if is_run_from_cache:
end = getEndSHAFrombranch(example, config='cslicer-definer', \
branch='after-cslicer-definer')
# split commits by file
splitCommitsByFile(example, repo_path, start, end, 'after-cslicer-definer-split')
# cache intermediate repo after cslicer-definer-split (CDS)
cachePrefixRepoIfNotAlreadyCached(example, 'cslicer-definer-split', repo_path)
# generate split log file
split_log = genSplitLogFile(example, config='cslicer-definer-split-cslicer-definer', \
start=start, repo_path=repo_path, \
branch='after-cslicer-definer-split')
split_end_time = time.time()
split_exec_time = split_end_time - definer_end_time
# check if any suffix reusable
if share_suffix:
if isSuffixExist('cslicer-definer', example):
is_match, matched_config = isStateMatch(split_log, 'cslicer-definer', example)
if is_match:
is_suffix_skipped = True
cslicer2_exec_time = 'NOT RUN'
definer2_exec_time = 'NOT RUN'
# find out saved by which config, then copy the slice and time of that config.
final_log = output_dir + '/' + example + '.log'
copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
goto .skip_suffix
# cache suffix:
cacheSuffixIfNotAlreadyCached(example, config='cslicer-definer-split-cslicer-definer', \
suffix='cslicer-definer', log_file=split_log)
countChangedLines(split_log, repo_path, 'split')
# -------------------------------- split end -------------------------------------
label .cslicer2
# -------------------------------- cslicer2 start -------------------------------------
# generate new config files for splitted history
_, end, _, _, _, lines, _ = extractInfoFromCSlicerConfigs(example)
split_config_file = genSplittedConfigFile(example, repo_path, lines, configs_dir, \
'after-cslicer-definer-split')
# move untracked files back
os.chdir(repo_path)
for dir_path, subpaths, files in os.walk(TEMP_FILES_DIR):
for f in files:
if '/src/test' in dir_path:
shutil.copy(dir_path + '/' + f, \
repo_path + dir_path[dir_path.index('/src/test'):])
# copy target dir back (required by CSlicer)
copyTargetDirBackForCSlicer2(example, repo_path)
# run cslicer on split history, save logs
cslicer_log = output_dir + '/' + example + '.log.phase3'
runCSlicerTool(cslicer_log, split_config_file, 'after-cslicer-definer-split')
# cherry-pick history slice to a new branch, reset start and end
cslicer_history_slice, commit_msg_list = \
extractHistorySliceFromCSlicerLog(cslicer_log)
end = applyHistorySlice(repo_path, start, cslicer_history_slice, commit_msg_list, \
'after-cslicer-definer-split-cslicer')
# cache intermediate repo after cslicer (CDSC)
cachePrefixRepoIfNotAlreadyCached(example, 'cslicer-definer-split-cslicer', repo_path)
cslicer2_end_time = time.time()
cslicer2_exec_time = cslicer2_end_time - split_end_time
# check if any suffix reusable
if share_suffix:
if isSuffixExist('definer', example):
is_match, matched_config = isStateMatch(cslicer_log, 'definer', example)
if is_match:
is_suffix_skipped = True
definer2_exec_time = 'NOT RUN'
# find out saved by which config, then copy the slice and time of that config.
final_log = output_dir + '/' + example + '.log'
copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
goto .skip_suffix
# cache suffix:
cacheSuffixIfNotAlreadyCached(example, config='cslicer-definer-split-cslicer-definer', \
suffix='definer', log_file=cslicer_log)
countChangedLines(cslicer_log, repo_path, 'cslicer')
# -------------------------------- cslicer2 end -------------------------------------
label .definer2
# -------------------------------- definer2 start -------------------------------------
if is_run_from_cache:
end = getEndSHAFrombranch(example, config='cslicer-definer-split-cslicer', \
branch='after-cslicer-definer-split-cslicer')
# temp definer config file
definer_config_file = updateDefinerConfig(example, end, configs_dir)
definer_log = output_dir + '/' + example + '.log'
# checkout to original end commit and run the tests
_, end, _, _, test_suite, _, _, _ = extractInfoFromDefinerConfigs(example)
os.chdir(repo_path)
# --------------
runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
runDefinerTool(definer_log, definer_config_file, 'after-cslicer-definer-split-cslicer')
cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
# --------------
# cherry-pick history slice to a new branch, reset start and end
history_slice, commit_msg_list = extractHistorySliceFromDefinerLog(definer_log)
if len(history_slice) == 0 and len(commit_msg_list) == 0:
print ('Definer times out!')
definer2_exec_time = 'TIME OUT'
final_log = definer_log
goto .timeout
end = applyHistorySlice(repo_path, start, history_slice, commit_msg_list, \
'after-cslicer-definer-split-cslicer-definer')
cachePrefixRepoIfNotAlreadyCached(example, 'cslicer-definer-split-cslicer-definer', \
repo_path)
definer2_end_time = time.time()
definer2_exec_time = definer2_end_time - cslicer2_end_time
final_log = definer_log
# -------------------------------- definer2 end -------------------------------------
label .timeout
label .skip_suffix
# debug: move repo to somewhere else
end_time = time.time()
run_time = end_time - start_time
time_dict = collections.OrderedDict({})
time_dict['[CSlicer Exec Time]'] = cslicer_exec_time
time_dict['[Definer Exec Time]'] = definer_exec_time
time_dict['[Split Exec Time]'] = split_exec_time
time_dict['[CSlicer2 Exec Time]'] = cslicer2_exec_time
time_dict['[Definer2 Exec Time]'] = definer2_exec_time
time_dict['[Total Exec Time]'] = run_time
insertTimeDictinLog(final_log, time_dict)
if not is_suffix_skipped:
countChangedLines(final_log, repo_path, 'definer')
#backupRepoForDebugging(example, repo_path)
cleanTempLogs()
@with_goto
def runCSlicerSplitDefinerCSlicerDefiner(example, share_prefix, share_suffix, \
                                         orig_history_dir=ORIG_HISTORY_DIR, \
                                         cached_repos_dir=CACHED_REPOS_DIR, \
                                         output_dir=CSLICER_SPLIT_DEFINER_CSLICER_DEFINER_OUTPUT_DIR, \
                                         configs_dir=CSLICER_SPLIT_DEFINER_CSLICER_DEFINER_CONFIGS_DIR):
    """Run the CSlicer -> Split -> Definer -> CSlicer -> Definer pipeline on one example.

    Stage order: cslicer, split, definer, cslicer2, definer2.  The `goto`/`label`
    statements (enabled by @with_goto) are used to jump directly into a later
    stage when a cached prefix repo exists, and to jump to `.skip_suffix` when a
    previously cached suffix slice from another configuration matches the
    current state (so the remaining stages need not be recomputed).

    Parameters:
        example        -- benchmark example name; used to locate configs/logs.
        share_prefix   -- if falsy, skip all prefix-cache reuse (goto .prefix_disabled).
        share_suffix   -- if truthy, attempt suffix reuse after each stage.
        orig_history_dir, cached_repos_dir, output_dir, configs_dir -- working dirs.

    Side effects: rebuilds the example's working repo from its '-cache' copy or a
    cached prefix repo, writes phase logs ('.log.phase1'..'.log.phase3') and the
    final '<example>.log' with a timing dictionary inserted, and updates the
    prefix/suffix caches.  Returns None.
    """
    print ('Starting Example :' + example)
    # start counting the exec time
    start_time = time.time()
    start, end, repo_name, test_suite, repo_path, lines, config_file = \
        extractInfoFromCSlicerConfigs(example)
    # remove the old repo in _downloads dir
    if os.path.isdir(repo_path):
        print ('remove old repo')
        # NOTE(review): fixed 30s sleep before deletion — presumably to let
        # previous processes release file handles; confirm it is still needed.
        time.sleep(30)
        shutil.rmtree(repo_path, ignore_errors=True)
    # check if cache is disabled
    if not share_prefix:
        is_run_from_cache = False
        shutil.copytree(repo_path + '-cache', repo_path)
        # Prefix sharing disabled: skip all prefix-cache lookups below.
        goto .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # copy the cached repo if exist
    # Longest cached prefix wins: C-S-D-C, then C-S-D, then C-S, then C.
    if isPrefixRepoCached(example, 'cslicer-split-definer-cslicer'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + \
                        'cslicer-split-definer-cslicer', repo_path)
    elif isPrefixRepoCached(example, 'cslicer-split-definer'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'cslicer-split-definer', \
                        repo_path)
    elif isPrefixRepoCached(example, 'cslicer-split'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'cslicer-split', repo_path)
    elif isPrefixRepoCached(example, 'cslicer'):
        shutil.copytree(cached_repos_dir + '/' + example + '/' + 'cslicer', repo_path)
    else:
        # no cache, copy a new repo
        shutil.copytree(repo_path + '-cache', repo_path)
    # Jump past the stages already covered by the cached prefix; the end times
    # of the skipped stages are seeded with start_time so later stage timings
    # (computed as differences) remain well-defined.
    if isPrefixRepoCached(example, 'cslicer-split-definer-cslicer'):
        is_run_from_cache =True
        cslicer2_end_time = start_time
        cslicer_exec_time = 'NOT RUN'
        split_exec_time = 'NOT RUN'
        definer_exec_time = 'NOT RUN'
        cslicer2_exec_time = 'NOT RUN'
        goto .definer2
    elif isPrefixRepoCached(example, 'cslicer-split-definer'):
        is_run_from_cache =True
        definer_end_time = start_time
        cslicer_exec_time = 'NOT RUN'
        split_exec_time = 'NOT RUN'
        definer_exec_time = 'NOT RUN'
        goto .cslicer2
    elif isPrefixRepoCached(example, 'cslicer-split'):
        is_run_from_cache =True
        split_end_time = start_time
        cslicer_exec_time = 'NOT RUN'
        split_exec_time = 'NOT RUN'
        goto .definer
    elif isPrefixRepoCached(example, 'cslicer'):
        is_run_from_cache =True
        cslicer_end_time = start_time
        cslicer_exec_time = 'NOT RUN'
        goto .split
    else: # cache not exist
        is_run_from_cache = False
        goto .cslicer
    label .prefix_disabled
    # a label indicating whether any suffix is saved in this run
    is_suffix_skipped = False
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('cslicer-split-definer-cslicer-definer', example):
            is_match, matched_config = isStateMatch(None, \
                                                    'cslicer-split-definer-cslicer-definer', \
                                                    example, start=start, end=end, \
                                                    repo_path=repo_path)
            if is_match:
                is_suffix_skipped = True
                cslicer_exec_time = 'NOT RUN'
                split_exec_time = 'NOT RUN'
                definer_exec_time = 'NOT RUN'
                cslicer2_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
    # suffix cache: cache initial state, using full suffix
    orig_history = getOriginalHistory(start, end, repo_path)
    orig_history_file = orig_history_dir + '/' + example + '.hist'
    fw = open(orig_history_file, 'w')
    fw.write('\n'.join(orig_history))
    fw.close()
    cacheSuffixIfNotAlreadyCached(example, config='cslicer-split-definer-cslicer-definer', \
                                  suffix='cslicer-split-definer-cslicer-definer', \
                                  log_file=orig_history_file)
    label .cslicer
    # -------------------------------- cslicer start -------------------------------------
    # run tests at end commit, generate jacoco files
    runTestsGenJacoco(example, end, repo_path, test_suite)
    # stash changes on pom
    sub.run('git stash', shell=True)
    # copy target dir because we will run CSlicer again later
    # cache target dir, otherwise we cannot run CSlicer2 in the middle
    cacheTargetDirForCSlicer2(example, repo_path)
    # run cslicer on original history, save temp logs
    cslicer_temp_log = output_dir + '/' + example + '.log.phase1'
    runCSlicerTool(cslicer_temp_log, config_file, 'orig')
    # delete orig branch
    # Both checkouts are attempted since the default branch may be either name.
    sub.run('git checkout trunk', shell=True)
    sub.run('git checkout master', shell=True)
    sub.run('git branch -D orig', shell=True)
    # cherry-pick history slice to a new branch, reset start and end
    cslicer_history_slice, commit_msg_list = \
        extractHistorySliceFromCSlicerLog(cslicer_temp_log)
    # for NET-525, NET-527
    # Hard-coded extra commit these two examples need in their slice.
    if example == 'NET-525' or example == 'NET-527':
        cslicer_history_slice.append('4379a681')
        commit_msg_list.append('Cut-n-paste bug')
    end = applyHistorySlice(repo_path, start, cslicer_history_slice, commit_msg_list, \
                            'after-cslicer')
    # cache intermediate repo after cslicer (C)
    cachePrefixRepoIfNotAlreadyCached(example, 'cslicer', repo_path)
    cslicer_end_time = time.time()
    cslicer_exec_time = cslicer_end_time - start_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('split-definer-cslicer-definer', example):
            is_match, matched_config = isStateMatch(cslicer_temp_log, \
                                                    'split-definer-cslicer-definer', example)
            if is_match:
                is_suffix_skipped = True
                split_exec_time = 'NOT RUN'
                definer_exec_time = 'NOT RUN'
                cslicer2_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
    # cache suffix:
    cacheSuffixIfNotAlreadyCached(example, config='cslicer-split-definer-cslicer-definer', \
                                  suffix='split-definer-cslicer-definer', \
                                  log_file=cslicer_temp_log)
    countChangedLines(cslicer_temp_log, repo_path, 'cslicer')
    # -------------------------------- cslicer end -------------------------------------
    label .split
    # -------------------------------- split start -------------------------------------
    if is_run_from_cache:
        # Running from a cached 'cslicer' prefix: recover the end SHA recorded
        # on the cached branch instead of the one computed above.
        end = getEndSHAFrombranch(example, config='cslicer', branch='after-cslicer')
    # split commits by file
    splitCommitsByFile(example, repo_path, start, end, 'after-cslicer-split')
    # cache intermediate repo after cslicer-split (CS)
    cachePrefixRepoIfNotAlreadyCached(example, 'cslicer-split', repo_path)
    # generate split log file
    split_log = genSplitLogFile(example, config='cslicer-split-definer-cslicer-definer', \
                                start=start, repo_path=repo_path, branch='after-cslicer-split')
    split_end_time = time.time()
    split_exec_time = split_end_time - cslicer_end_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('definer-cslicer-definer', example):
            is_match, matched_config = isStateMatch(split_log, 'definer-cslicer-definer', \
                                                    example)
            if is_match:
                is_suffix_skipped = True
                definer_exec_time = 'NOT RUN'
                cslicer2_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
    # cache suffix:
    cacheSuffixIfNotAlreadyCached(example, config='cslicer-split-definer-cslicer-definer', \
                                  suffix='definer-cslicer-definer', log_file=split_log)
    countChangedLines(split_log, repo_path, 'split')
    # -------------------------------- split end -------------------------------------
    label .definer
    # -------------------------------- definer start -------------------------------------
    # generate new config files for splitted history
    _, end, _, _, test_suite, _, lines, _ = extractInfoFromDefinerConfigs(example)
    split_config_file = genSplittedConfigFile(example, repo_path, lines, configs_dir, \
                                              'after-cslicer-split')
    # run definer on splitted history, save logs
    definer_log = output_dir + '/' + example + '.log.phase2'
    # checkout to end commit and run the tests
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    runDefinerTool(definer_log, split_config_file, 'after-cslicer-split')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # cherry-pick history slice to a new branch
    definer_history_slice, commit_msg_list = extractHistorySliceFromDefinerLog(definer_log)
    # An empty slice AND empty message list is how a Definer timeout shows up
    # in the log; abandon the remaining stages in that case.
    if len(definer_history_slice) == 0 and len(commit_msg_list) == 0:
        print ('Definer times out!')
        definer_exec_time = 'TIME OUT'
        cslicer2_exec_time = 'NOT RUN'
        definer2_exec_time = 'NOT RUN'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, definer_history_slice, commit_msg_list, \
                            'after-cslicer-split-definer')
    # cache intermediate repo after cslicer-definer-split-definer (CSD)
    cachePrefixRepoIfNotAlreadyCached(example, 'cslicer-split-definer', repo_path)
    definer_end_time = time.time()
    definer_exec_time = definer_end_time - split_end_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('cslicer-definer', example):
            is_match, matched_config = isStateMatch(definer_log, 'cslicer-definer', example)
            if is_match:
                is_suffix_skipped = True
                cslicer2_exec_time = 'NOT RUN'
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
    # cache suffix:
    cacheSuffixIfNotAlreadyCached(example, config='cslicer-split-definer-cslicer-definer', \
                                  suffix='cslicer-definer', log_file=definer_log)
    countChangedLines(definer_log, repo_path, 'definer')
    # -------------------------------- definer end -------------------------------------
    label .cslicer2
    # -------------------------------- cslicer2 start ---------------------------------
    if is_run_from_cache:
        end = getEndSHAFrombranch(example, config='cslicer-split-definer', \
                                  branch='after-cslicer-split-definer')
    # update cslicer config
    cslicer_config_file = updateCSlicerConfig(example, end, configs_dir)
    # move untracked files back
    os.chdir(repo_path)
    # Restore test files that earlier stages moved into TEMP_FILES_DIR,
    # reconstructing their path under the repo's src/test tree.
    for dir_path, subpaths, files in os.walk(TEMP_FILES_DIR):
        for f in files:
            if '/src/test' in dir_path:
                shutil.copy(dir_path + '/' + f, \
                            repo_path + dir_path[dir_path.index('/src/test'):])
    # copy target dir back (required by CSlicer)
    copyTargetDirBackForCSlicer2(example, repo_path)
    # run cslicer on splitted history, save logs
    cslicer_log = output_dir + '/' + example + '.log.phase3'
    runCSlicerTool(cslicer_log, cslicer_config_file, 'after-cslicer-split-definer')
    # cherry-pick history slice to a new branch, reset start and end
    cslicer_history_slice, commit_msg_list = \
        extractHistorySliceFromCSlicerLog(cslicer_log)
    end = applyHistorySlice(repo_path, start, cslicer_history_slice, commit_msg_list, \
                            'after-cslicer-split-definer-cslicer')
    cachePrefixRepoIfNotAlreadyCached(example, 'cslicer-split-definer-cslicer', repo_path)
    cslicer2_end_time = time.time()
    cslicer2_exec_time = cslicer2_end_time - definer_end_time
    # check if any suffix reusable
    if share_suffix:
        if isSuffixExist('definer', example):
            is_match, matched_config = isStateMatch(cslicer_log, 'definer', example)
            if is_match:
                is_suffix_skipped = True
                definer2_exec_time = 'NOT RUN'
                # find out saved by which config, then copy the slice and time of that config.
                final_log = output_dir + '/' + example + '.log'
                copyTheSliceFromOneConfigLogToFinalLog(matched_config, example, final_log)
                goto .skip_suffix
    # cache suffix:
    cacheSuffixIfNotAlreadyCached(example, config='cslicer-split-definer-cslicer-definer', \
                                  suffix='definer', log_file=cslicer_log)
    countChangedLines(cslicer_log, repo_path, 'cslicer')
    # -------------------------------- cslicer2 end -------------------------------------
    label .definer2
    # -------------------------------- definer2 start -------------------------------------
    if is_run_from_cache:
        end = getEndSHAFrombranch(example, config='cslicer-split-definer-cslicer', \
                                  branch='after-cslicer-split-definer-cslicer')
    # temp definer config file
    definer_config_file = updateDefinerConfig(example, end, configs_dir)
    definer_log = output_dir + '/' + example + '.log'
    # checkout to original end commit and run the tests
    _, end, _, _, test_suite, _, _, _ = extractInfoFromDefinerConfigs(example)
    os.chdir(repo_path)
    # --------------
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    runDefinerTool(definer_log, definer_config_file, 'after-cslicer-split-definer-cslicer')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # --------------
    # cherry-pick history slice to a new branch
    history_slice, commit_msg_list = extractHistorySliceFromDefinerLog(definer_log)
    if len(history_slice) == 0 and len(commit_msg_list) == 0:
        print ('Definer times out!')
        definer2_exec_time = 'TIME OUT'
        final_log = definer_log
        goto .timeout
    end = applyHistorySlice(repo_path, start, history_slice, commit_msg_list, \
                            'after-cslicer-split-definer-cslicer-definer')
    cachePrefixRepoIfNotAlreadyCached(example, 'cslicer-split-definer-cslicer-definer', \
                                      repo_path)
    definer2_end_time = time.time()
    definer2_exec_time = definer2_end_time - cslicer2_end_time
    final_log = definer_log
    # -------------------------------- definer2 end -------------------------------------
    label .timeout
    label .skip_suffix
    # debug: move repo to somewhere else
    end_time = time.time()
    run_time = end_time - start_time
    # Collect per-stage timings (floats, or the strings 'NOT RUN'/'TIME OUT')
    # and insert them into the final log.
    time_dict = collections.OrderedDict({})
    time_dict['[CSlicer Exec Time]'] = cslicer_exec_time
    time_dict['[Split Exec Time]'] = split_exec_time
    time_dict['[Definer Exec Time]'] = definer_exec_time
    time_dict['[CSlicer2 Exec Time]'] = cslicer2_exec_time
    time_dict['[Definer2 Exec Time]'] = definer2_exec_time
    time_dict['[Total Exec Time]'] = run_time
    insertTimeDictinLog(final_log, time_dict)
    if not is_suffix_skipped:
        countChangedLines(final_log, repo_path, 'definer')
    #backupRepoForDebugging(example, repo_path)
    # clean temp logs
    cleanTempLogs()
# For true minimal exp
def runDefinerWithMemoryStandalone(example):
    """Run the memory-enabled Definer by itself on a fresh copy of *example*'s repo.

    Restores the working repo from its '-cache' clone, runs the test suite at
    the end commit, invokes Definer with memory, then records total wall-clock
    time and changed-line counts in the log.  Returns None.
    """
    print ('Starting Example :' + example)
    began_at = time.time()
    # Pull start/end commits, test suite and paths out of the definer config.
    (start, end, repo_name, build_script_path, test_suite,
     repo_path, lines, config_file) = extractInfoFromDefinerConfigs(example)
    # Start from a pristine repo: drop any leftover working copy first.
    if os.path.isdir(repo_path):
        print ('remove old repo')
        shutil.rmtree(repo_path)
    shutil.copytree(repo_path + '-cache', repo_path)
    # Check out the end commit and run the tests there.
    runTestsAtEndCommitForDefiner(example, end, repo_path, test_suite)
    # Run definer against the original history, capturing its log.
    definer_log = DEFINER_WITH_MEMORY_STANDALONE_OUTPUT_DIR + '/' + example + '.log'
    runDefinerToolWithMemory(definer_log, config_file, 'definerorig')
    cleanRepoAfterDefinerTimeout(repo_path) # when definer timeout, remove lock files
    # -------------------------------- definer end -------------------------------------
    # Record total elapsed time and changed-line statistics in the log.
    finished_at = time.time()
    putTimeinLog(definer_log, finished_at - began_at)
    countChangedLines(definer_log, repo_path, 'definer')
    #backupRepoForDebugging(example, repo_path)
if __name__ == '__main__':
    # Command-line dispatcher: parse flags, then run exactly one pipeline
    # configuration.  Plural flags (e.g. --split-cslicer) iterate over the
    # module-level `examples` list; the `*_one` flags carry a single example
    # name as the option's value.  Every branch ends with exit(0), so flag
    # order below determines precedence when several flags are set.
    opts = parseArgs(sys.argv[1:])
    # check dirs: orig-history, temp-files, temp-logs,
    # _split_logs, jacoco-files, _repo, temp-configs
    # check repos: create _downloads dir and clone csv, lang,
    # net, io, compress, then create copies
    # Cache-clearing flags: wipe and recreate the cache dirs before running.
    if opts.clean_prefix_cache:
        shutil.rmtree(CACHED_REPOS_DIR)
        os.makedirs(CACHED_REPOS_DIR)
    if opts.clean_suffix_cache:
        shutil.rmtree(SUFFIX_SHARING_CACHE_DIR)
        os.makedirs(SUFFIX_SHARING_CACHE_DIR)
    # Normalize the sharing flags to plain booleans.
    if opts.share_prefix:
        share_prefix = True
    else:
        share_prefix = False
    if opts.share_suffix:
        share_suffix = True
    else:
        share_suffix = False
    if opts.clean_touchset:
        cleanTouchSet()
        exit(0)
    # NOTE(review): some runners are called with only `example` here but with
    # (example, share_prefix, share_suffix) in the matching *_one branch below
    # (e.g. runSplitCSlicer, runDefinerSplitCSlicer, runCSlicerDefiner) —
    # confirm the signatures accept both call shapes.
    if opts.split_cslicer:
        for example in examples:
            runSplitCSlicer(example)
        exit(0)
    if opts.split_definer:
        for example in examples:
            runSplitDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.cslicer_split_cslicer:
        for example in examples:
            runCSlicerSplitCSlicer(example)
        exit(0)
    if opts.cslicer_split_definer:
        for example in examples:
            runCSlicerSplitDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.definer_split_cslicer:
        for example in examples:
            runDefinerSplitCSlicer(example)
        exit(0)
    if opts.definer_split_definer:
        for example in examples:
            runDefinerSplitDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.cslicer_definer_split_cslicer:
        for example in examples:
            runCSlicerDefinerSplitCSlicer(example, share_prefix, share_suffix)
        exit(0)
    if opts.cslicer_definer_split_definer:
        for example in examples:
            runCSlicerDefinerSplitDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.cslicer:
        for example in examples:
            runCSlicerStandalone(example)
        exit(0)
    if opts.definer:
        for example in examples:
            runDefinerStandalone(example)
        exit(0)
    if opts.cslicer_definer:
        for example in examples:
            runCSlicerDefiner(example)
        exit(0)
    if opts.split_cslicer_definer:
        for example in examples:
            runSplitCSlicerDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.split_cslicer_definer_definer:
        for example in examples:
            runSplitCSlicerDefinerDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.cslicer_split_definer_definer:
        for example in examples:
            runCSlicerSplitDefinerDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.definer_split_cslicer_definer:
        for example in examples:
            runDefinerSplitCSlicerDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.definer_cslicer_split_definer:
        for example in examples:
            runDefinerCSlicerSplitDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.split_definer_cslicer_definer:
        for example in examples:
            runSplitDefinerCSlicerDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.cslicer_definer_definer:
        for example in examples:
            runCSlicerDefinerDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.definer_cslicer_definer:
        for example in examples:
            runDefinerCSlicerDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.split_definer_definer:
        for example in examples:
            runSplitDefinerDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.definer_definer:
        for example in examples:
            runDefinerDefiner(example, share_prefix, share_suffix)
        exit(0)
    # The two five-stage configs sleep 30s around each run — presumably to let
    # file handles/processes settle between examples; confirm still required.
    if opts.cslicer_split_definer_cslicer_definer :
        for example in examples:
            time.sleep(30)
            runCSlicerSplitDefinerCSlicerDefiner(example, share_prefix, share_suffix)
            time.sleep(30)
        exit(0)
    if opts.cslicer_definer_split_cslicer_definer :
        for example in examples:
            time.sleep(30)
            runCSlicerDefinerSplitCSlicerDefiner(example, share_prefix, share_suffix)
            time.sleep(30)
        exit(0)
    if opts.definer_with_memory: # For true minimal exp
        for example in examples:
            runDefinerWithMemoryStandalone(example)
        exit(0)
    # ---- single-example (*_one) variants: the option value is the example name ----
    if opts.split_cslicer_one:
        example = opts.split_cslicer_one
        runSplitCSlicer(example, share_prefix, share_suffix)
        exit(0)
    if opts.split_definer_one:
        example = opts.split_definer_one
        runSplitDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.cslicer_split_cslicer_one:
        example = opts.cslicer_split_cslicer_one
        runCSlicerSplitCSlicer(example)
        exit(0)
    if opts.cslicer_split_definer_one:
        example = opts.cslicer_split_definer_one
        runCSlicerSplitDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.definer_split_cslicer_one:
        example = opts.definer_split_cslicer_one
        runDefinerSplitCSlicer(example, share_prefix, share_suffix)
        exit(0)
    if opts.definer_split_definer_one:
        example = opts.definer_split_definer_one
        runDefinerSplitDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.cslicer_definer_split_cslicer_one:
        example = opts.cslicer_definer_split_cslicer_one
        runCSlicerDefinerSplitCSlicer(example, share_prefix, share_suffix)
        exit(0)
    if opts.cslicer_definer_split_definer_one:
        example = opts.cslicer_definer_split_definer_one
        runCSlicerDefinerSplitDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.cslicer_one:
        example = opts.cslicer_one
        runCSlicerStandalone(example)
        exit(0)
    if opts.definer_one:
        example = opts.definer_one
        runDefinerStandalone(example)
        exit(0)
    if opts.cslicer_definer_one:
        example = opts.cslicer_definer_one
        runCSlicerDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.split_cslicer_definer_one:
        example = opts.split_cslicer_definer_one
        runSplitCSlicerDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.split_cslicer_definer_definer_one:
        example = opts.split_cslicer_definer_definer_one
        runSplitCSlicerDefinerDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.cslicer_split_definer_definer_one:
        example = opts.cslicer_split_definer_definer_one
        runCSlicerSplitDefinerDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.definer_split_cslicer_definer_one:
        example = opts.definer_split_cslicer_definer_one
        runDefinerSplitCSlicerDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.definer_cslicer_split_definer_one:
        example = opts.definer_cslicer_split_definer_one
        runDefinerCSlicerSplitDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.split_definer_cslicer_definer_one:
        example = opts.split_definer_cslicer_definer_one
        runSplitDefinerCSlicerDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.cslicer_definer_definer_one:
        example = opts.cslicer_definer_definer_one
        runCSlicerDefinerDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.definer_cslicer_definer_one:
        example = opts.definer_cslicer_definer_one
        runDefinerCSlicerDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.split_definer_definer_one:
        example = opts.split_definer_definer_one
        runSplitDefinerDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.definer_definer_one:
        example = opts.definer_definer_one
        runDefinerDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.cslicer_split_definer_cslicer_definer_one:
        example = opts.cslicer_split_definer_cslicer_definer_one
        runCSlicerSplitDefinerCSlicerDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.cslicer_definer_split_cslicer_definer_one:
        example = opts.cslicer_definer_split_cslicer_definer_one
        runCSlicerDefinerSplitCSlicerDefiner(example, share_prefix, share_suffix)
        exit(0)
    if opts.definer_with_memory_one: # For true minimal exp
        example = opts.definer_with_memory_one
        runDefinerWithMemoryStandalone(example)
        exit(0)
| 50.129782
| 111
| 0.620852
| 31,083
| 280,426
| 5.360519
| 0.019303
| 0.033369
| 0.016769
| 0.021342
| 0.935884
| 0.921745
| 0.901327
| 0.886701
| 0.875646
| 0.858553
| 0
| 0.00518
| 0.251703
| 280,426
| 5,593
| 112
| 50.138745
| 0.788851
| 0.160909
| 0
| 0.809808
| 0
| 0
| 0.125337
| 0.049467
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017009
| false
| 0
| 0.002209
| 0
| 0.030263
| 0.020323
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
887ed964d1267da86a27264d50e60b67f0a3a061
| 131
|
py
|
Python
|
lib/galaxy/webapps/reports/__init__.py
|
blankenberg/galaxy-data-resource
|
ca32a1aafd64948f489a4e5cf88096f32391b1d9
|
[
"CC-BY-3.0"
] | null | null | null |
lib/galaxy/webapps/reports/__init__.py
|
blankenberg/galaxy-data-resource
|
ca32a1aafd64948f489a4e5cf88096f32391b1d9
|
[
"CC-BY-3.0"
] | 1
|
2015-02-21T18:48:19.000Z
|
2015-02-27T15:50:32.000Z
|
lib/galaxy/webapps/reports/__init__.py
|
blankenberg/galaxy-data-resource
|
ca32a1aafd64948f489a4e5cf88096f32391b1d9
|
[
"CC-BY-3.0"
] | 3
|
2015-02-22T13:34:16.000Z
|
2020-10-01T01:28:04.000Z
|
"""The Galaxy Reports application."""
from galaxy.web.framework import url_for
from galaxy.web.framework.decorators import expose
| 26.2
| 50
| 0.80916
| 18
| 131
| 5.833333
| 0.666667
| 0.190476
| 0.247619
| 0.419048
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.099237
| 131
| 4
| 51
| 32.75
| 0.889831
| 0.236641
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
31f9b2b7f6b348d2fd0d58500e58e4a8041c5ef2
| 308
|
py
|
Python
|
src/std/coppertop/std/stats.py
|
DangerMouseB/coppertop
|
26e9b01034b29fa8ec0e41bf1fa2b81c7c7bb34d
|
[
"BSD-3-Clause"
] | null | null | null |
src/std/coppertop/std/stats.py
|
DangerMouseB/coppertop
|
26e9b01034b29fa8ec0e41bf1fa2b81c7c7bb34d
|
[
"BSD-3-Clause"
] | null | null | null |
src/std/coppertop/std/stats.py
|
DangerMouseB/coppertop
|
26e9b01034b29fa8ec0e41bf1fa2b81c7c7bb34d
|
[
"BSD-3-Clause"
] | null | null | null |
# *******************************************************************************
#
# Copyright (c) 2021 David Briant. All rights reserved.
#
# *******************************************************************************
from coppertop.std._stats import core
from coppertop.std._stats.core import *
| 28
| 81
| 0.344156
| 20
| 308
| 5.2
| 0.7
| 0.25
| 0.307692
| 0.403846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014337
| 0.094156
| 308
| 10
| 82
| 30.8
| 0.358423
| 0.701299
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
ee1f011a9339cddf2d8fb2edd7133b8cc80c14ef
| 32,212
|
py
|
Python
|
scripts/stagelibs.py
|
magic-lantern-android-studio/mle-tools
|
66e10e122a0845a6c297baadc21fa3215faeee0a
|
[
"MIT"
] | null | null | null |
scripts/stagelibs.py
|
magic-lantern-android-studio/mle-tools
|
66e10e122a0845a6c297baadc21fa3215faeee0a
|
[
"MIT"
] | null | null | null |
scripts/stagelibs.py
|
magic-lantern-android-studio/mle-tools
|
66e10e122a0845a6c297baadc21fa3215faeee0a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# This script is used to stage the Magic Lantern Core and Parts libraries into their
# corresponding Android Studio projects.
import os
import shutil
import tempfile
import argparse
# Initialize project list.
# The staging targets this script knows about; "all" stages everything.
g_projects = [
    "core-math",
    "core-runtime",
    "parts-base",
    "parts-mrefs",
    "parts-roles",
    "parts-props",
    "parts-stages",
    "parts-sets",
    "parts-actors",
    "all",
]
# Kept for backward compatibility with code that indexes by size.
g_projectSize = len(g_projects)
# List the available Magic Lantern projects to stage.
def listProjects():
    """Print the list of available Magic Lantern projects, one per line."""
    # Iterate the list directly instead of indexing via range(g_projectSize);
    # print(...) is valid on both Python 2 and Python 3.
    for project in g_projects:
        print(project)
    return
# Stage core-math project.
def stageCoreMath():
    """Copy the mlmath jars into every dependent project checked out
    next to this script.

    Projects that are not present (os.path.isdir fails) are skipped
    silently, exactly as the original per-project checks did.
    """
    print("Staging core-math...")
    # Consumers of the mlmath library, in the original staging order.
    targets = ['parts-base', 'parts-props', 'parts-roles', 'parts-sets',
               'parts-stages', 'parts-actors', 'titles-imagetest',
               'titles-modeltest', 'titles-hellocube', 'titles-cubetest',
               'titles-movietest']
    for target in targets:
        if not os.path.isdir(target):
            continue  # project not checked out; nothing to stage
        print("\tUpdating %s" % target)
        libs = target + '/app/libs'
        if not os.path.exists(libs):
            os.makedirs(libs)
        shutil.copy('core-math/app/release/mlmath.jar',
                    libs + '/mlmath.jar')
        shutil.copy('core-math/app/release/mlmath-sources.jar',
                    libs + '/mlmath-sources.jar')
    return
# Stage core-runtime project.
def stageCoreRuntime():
    """Copy the mlert jars into every dependent project checked out
    next to this script.

    Missing project directories are skipped, matching the original
    per-project os.path.isdir guards.
    """
    print("Staging core-runtime...")
    # Consumers of the mlert library, in the original staging order.
    targets = ['parts-actors', 'parts-base', 'parts-mrefs', 'parts-props',
               'parts-roles', 'parts-sets', 'parts-stages',
               'titles-imagetest', 'titles-modeltest', 'titles-hellocube',
               'titles-cubetest', 'titles-movietest']
    for target in targets:
        if not os.path.isdir(target):
            continue  # project not checked out; nothing to stage
        print("\tUpdating %s" % target)
        libs = target + '/app/libs'
        if not os.path.exists(libs):
            os.makedirs(libs)
        shutil.copy('core-runtime/app/release/mlert.jar',
                    libs + '/mlert.jar')
        shutil.copy('core-runtime/app/release/mlert-sources.jar',
                    libs + '/mlert-sources.jar')
    return
# Stage parts-base project.
def stagePartsBase():
    """Copy the parts jars and/or the min3d-debug aar into each dependent
    project that is checked out next to this script.

    Each target differs in what it consumes, so the table below records
    (project, wants parts jar, wants min3d aar); missing directories are
    skipped as in the original per-project guards.
    """
    print("Staging parts-base...")
    targets = [
        ('parts-actors', True, False),
        ('parts-mrefs', True, True),
        ('parts-props', True, False),
        ('parts-roles', True, True),
        ('parts-sets', True, False),
        ('parts-stages', True, False),
        ('titles-imagetest', True, False),
        ('titles-modeltest', True, True),
        ('titles-hellocube', True, True),
        ('titles-cubetest', True, True),
        ('titles-movietest', True, False),
        ('test-min3d_01', False, True),
        ('test-min3d_02', False, True),
    ]
    for target, wantJar, wantAar in targets:
        if not os.path.isdir(target):
            continue  # project not checked out; nothing to stage
        print("\tUpdating %s" % target)
        if wantJar:
            libs = target + '/app/libs'
            if not os.path.exists(libs):
                os.makedirs(libs)
            shutil.copy('parts-base/app/release/parts.jar',
                        libs + '/parts.jar')
            shutil.copy('parts-base/app/release/parts-sources.jar',
                        libs + '/parts-sources.jar')
        if wantAar:
            aarDir = target + '/min3d-debug'
            if not os.path.exists(aarDir):
                os.makedirs(aarDir)
            shutil.copy('parts-base/min3d/build/outputs/aar/min3d-debug.aar',
                        aarDir + '/min3d-debug.aar')
    return
# Stage parts-mrefs project.
def stagePartsMrefs():
    """Copy the mrefs jars into every dependent project checked out
    next to this script; missing project directories are skipped.
    """
    print("Staging parts-mrefs...")
    # Consumers of the mrefs library, in the original staging order.
    targets = ['parts-actors', 'parts-roles', 'titles-imagetest',
               'titles-modeltest', 'titles-hellocube', 'titles-cubetest',
               'titles-movietest']
    for target in targets:
        if not os.path.isdir(target):
            continue  # project not checked out; nothing to stage
        print("\tUpdating %s" % target)
        libs = target + '/app/libs'
        if not os.path.exists(libs):
            os.makedirs(libs)
        shutil.copy('parts-mrefs/app/release/mrefs.jar',
                    libs + '/mrefs.jar')
        shutil.copy('parts-mrefs/app/release/mrefs-sources.jar',
                    libs + '/mrefs-sources.jar')
    return
# Stage parts-roles project.
def stagePartsRoles():
    """Copy the roles jars into every dependent project checked out
    next to this script; missing project directories are skipped.
    """
    print("Staging parts-roles...")
    # Consumers of the roles library, in the original staging order.
    targets = ['parts-props', 'parts-sets', 'titles-imagetest',
               'titles-modeltest', 'titles-hellocube', 'titles-cubetest',
               'titles-movietest']
    for target in targets:
        if not os.path.isdir(target):
            continue  # project not checked out; nothing to stage
        print("\tUpdating %s" % target)
        libs = target + '/app/libs'
        if not os.path.exists(libs):
            os.makedirs(libs)
        shutil.copy('parts-roles/app/release/roles.jar',
                    libs + '/roles.jar')
        shutil.copy('parts-roles/app/release/roles-sources.jar',
                    libs + '/roles-sources.jar')
    return
# Stage parts-props project.
def stagePartsProps():
    """Copy the props jars into every dependent project checked out
    next to this script; missing project directories are skipped.
    """
    print("Staging parts-props...")
    # Consumers of the props library, in the original staging order.
    targets = ['parts-actors', 'titles-imagetest', 'titles-modeltest',
               'titles-hellocube', 'titles-cubetest', 'titles-movietest']
    for target in targets:
        if not os.path.isdir(target):
            continue  # project not checked out; nothing to stage
        print("\tUpdating %s" % target)
        libs = target + '/app/libs'
        if not os.path.exists(libs):
            os.makedirs(libs)
        shutil.copy('parts-props/app/release/props.jar',
                    libs + '/props.jar')
        shutil.copy('parts-props/app/release/props-sources.jar',
                    libs + '/props-sources.jar')
    return
# Stage parts-stages project.
def stagePartsStages():
    """Copy the stages jars into every dependent project checked out
    next to this script; missing project directories are skipped.
    """
    print("Staging parts-stages...")
    # Consumers of the stages library, in the original staging order.
    targets = ['parts-sets', 'titles-imagetest', 'titles-modeltest',
               'titles-hellocube', 'titles-cubetest', 'titles-movietest']
    for target in targets:
        if not os.path.isdir(target):
            continue  # project not checked out; nothing to stage
        print("\tUpdating %s" % target)
        libs = target + '/app/libs'
        if not os.path.exists(libs):
            os.makedirs(libs)
        shutil.copy('parts-stages/app/release/stages.jar',
                    libs + '/stages.jar')
        shutil.copy('parts-stages/app/release/stages-sources.jar',
                    libs + '/stages-sources.jar')
    return
# Stage parts-sets project.
def stagePartsSets():
    """Copy the sets jars into every dependent title project checked out
    next to this script; missing project directories are skipped.
    """
    print("Staging parts-sets...")
    # Consumers of the sets library, in the original staging order.
    targets = ['titles-imagetest', 'titles-modeltest', 'titles-hellocube',
               'titles-cubetest', 'titles-movietest']
    for target in targets:
        if not os.path.isdir(target):
            continue  # project not checked out; nothing to stage
        print("\tUpdating %s" % target)
        libs = target + '/app/libs'
        if not os.path.exists(libs):
            os.makedirs(libs)
        shutil.copy('parts-sets/app/release/sets.jar',
                    libs + '/sets.jar')
        shutil.copy('parts-sets/app/release/sets-sources.jar',
                    libs + '/sets-sources.jar')
    return
# Stage parts-actors project.
def stagePartsActors():
    """Copy the actors jars into every dependent title project checked out
    next to this script; missing project directories are skipped.
    """
    print("Staging parts-actors...")
    # Consumers of the actors library, in the original staging order.
    targets = ['titles-imagetest', 'titles-modeltest', 'titles-hellocube',
               'titles-cubetest', 'titles-movietest']
    for target in targets:
        if not os.path.isdir(target):
            continue  # project not checked out; nothing to stage
        print("\tUpdating %s" % target)
        libs = target + '/app/libs'
        if not os.path.exists(libs):
            os.makedirs(libs)
        shutil.copy('parts-actors/app/release/actors.jar',
                    libs + '/actors.jar')
        shutil.copy('parts-actors/app/release/actors-sources.jar',
                    libs + '/actors-sources.jar')
    return
# Parse input arguments.
parser = argparse.ArgumentParser(description="Stage Magic Lantern projects.")
parser.add_argument('project', choices=g_projects, help='project to stage')
args = parser.parse_args()

# Stage the selected project.  A dispatch table replaces the long if/elif
# chain; any choice not in the table (i.e. "all") stages every project,
# matching the original else branch.
_STAGERS = {
    "core-math": stageCoreMath,
    "core-runtime": stageCoreRuntime,
    "parts-base": stagePartsBase,
    "parts-mrefs": stagePartsMrefs,
    "parts-roles": stagePartsRoles,
    "parts-props": stagePartsProps,
    "parts-sets": stagePartsSets,
    "parts-stages": stagePartsStages,
    "parts-actors": stagePartsActors,
}
if args.project in _STAGERS:
    _STAGERS[args.project]()
else:
    # "all": stage every project in dependency order (core libraries first).
    stageCoreMath()
    stageCoreRuntime()
    stagePartsBase()
    stagePartsMrefs()
    stagePartsRoles()
    stagePartsProps()
    stagePartsStages()
    stagePartsSets()
    stagePartsActors()
print("...Done")
| 57.727599
| 121
| 0.676766
| 4,426
| 32,212
| 4.919114
| 0.027113
| 0.090024
| 0.069585
| 0.038903
| 0.913099
| 0.897529
| 0.840667
| 0.836625
| 0.836625
| 0.832262
| 0
| 0.003088
| 0.155594
| 32,212
| 557
| 122
| 57.831239
| 0.797353
| 0.015584
| 0
| 0.586667
| 0
| 0
| 0.567426
| 0.441061
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.007619
| null | null | 0.16
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ee1f786a476461ad257efe6b3093d40ecb020fb6
| 87
|
py
|
Python
|
tests/acceptance/helpers/helper.py
|
vkpro-forks/selene
|
8d8aa2c7e0623e228f2ba2056c699ff8614d9444
|
[
"MIT"
] | null | null | null |
tests/acceptance/helpers/helper.py
|
vkpro-forks/selene
|
8d8aa2c7e0623e228f2ba2056c699ff8614d9444
|
[
"MIT"
] | null | null | null |
tests/acceptance/helpers/helper.py
|
vkpro-forks/selene
|
8d8aa2c7e0623e228f2ba2056c699ff8614d9444
|
[
"MIT"
] | null | null | null |
from selenium import webdriver
def get_test_driver():
    """Create and return a fresh Firefox WebDriver for acceptance tests."""
    driver = webdriver.Firefox()
    return driver
| 14.5
| 30
| 0.770115
| 11
| 87
| 5.909091
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16092
| 87
| 5
| 31
| 17.4
| 0.890411
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
c9fcf70e658eb102935ac3d9f5475add27d0e915
| 37
|
py
|
Python
|
test/run/t540.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 2,671
|
2015-01-03T08:23:25.000Z
|
2022-03-31T06:15:48.000Z
|
test/run/t540.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 972
|
2015-01-05T08:11:00.000Z
|
2022-03-29T13:47:15.000Z
|
test/run/t540.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 845
|
2015-01-03T19:53:36.000Z
|
2022-03-29T18:34:22.000Z
|
# Indexing a list with a bool: True == 1 and False == 0, so these select
# the second and first elements.  Parenthesized print keeps the fixture
# valid under both Python 2 and Python 3 with identical output.
print([1,2][True])
print([1,2][False])
| 12.333333
| 18
| 0.621622
| 8
| 37
| 2.875
| 0.625
| 0.521739
| 0.608696
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 0.108108
| 37
| 2
| 19
| 18.5
| 0.575758
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
4ebabc888b99ca47274e576baae3dab0b4643ac7
| 6,775
|
py
|
Python
|
src/tec/ic/ia/p1/g08_data.py
|
Fuabioo/Proyecto-Corto-2-3
|
44bdfd5f2774e2d0d8c8af79dc55dac340f6f4b0
|
[
"MIT"
] | null | null | null |
src/tec/ic/ia/p1/g08_data.py
|
Fuabioo/Proyecto-Corto-2-3
|
44bdfd5f2774e2d0d8c8af79dc55dac340f6f4b0
|
[
"MIT"
] | null | null | null |
src/tec/ic/ia/p1/g08_data.py
|
Fuabioo/Proyecto-Corto-2-3
|
44bdfd5f2774e2d0d8c8af79dc55dac340f6f4b0
|
[
"MIT"
] | 1
|
2021-10-20T22:13:04.000Z
|
2021-10-20T22:13:04.000Z
|
from tec.ic.ia.pc1 import g08
import numpy
from sklearn.preprocessing import LabelEncoder
from keras.utils import np_utils
from sklearn.preprocessing import StandardScaler,MinMaxScaler
def shaped_data(dataset):
    """Split *dataset* into a feature matrix and one-hot labels.

    Column 0 and column -2 are label-encoded and re-attached around the
    numeric middle columns; the final column becomes one-hot targets.
    Returns ``(features, one_hot_labels)``.
    """
    data = numpy.array(dataset)
    numeric = data[:, 1:-2].astype(float)
    # Integer-encode the two categorical columns and the target column.
    first_col = LabelEncoder().fit_transform(data[:, 0])
    last_cat = LabelEncoder().fit_transform(data[:, -2])
    target = LabelEncoder().fit_transform(data[:, -1])
    # Re-assemble: [encoded col 0 | numeric columns | encoded col -2].
    features = numpy.concatenate(
        (first_col.reshape((-1, 1)), numeric, last_cat.reshape((-1, 1))),
        axis=1,
    )
    # convert integers to dummy variables (i.e. one hot encoded)
    one_hot = np_utils.to_categorical(target)
    return features, one_hot
def shaped_data2(dataset):
    """Shape *dataset* for a two-stage classifier with integer targets.

    Returns three ``[inputs, targets]`` pairs:
      ``[X_second, X32]``       -- features without X32; X32 as integer target
      ``[X_second, encoded_Y]`` -- same features; party label as integer target
      ``[X, encoded_Y]``        -- features including X32; party label target

    NOTE(review): ``dummy_y`` and ``dummy_y2`` are computed but never
    returned, and the single StandardScaler instance is re-fitted on
    X_second after scaling X — presumably intentional; verify with callers.
    """
    dataset = numpy.array(dataset)
    # Column layout (assumed): [0]=categorical id, [1:-3]=numeric,
    # [-3]=X31, [-2]=X32, [-1]=party label — TODO confirm against g08.
    X = dataset[:,1:-3].astype(float)
    X0 = dataset[:,0]
    X32 = dataset[:,-2]
    X31 = dataset[:,-3]
    Y = dataset[:,-1]
    # encode class values as integers
    encoderY = LabelEncoder()
    encoderY.fit(Y)
    encoded_Y = encoderY.transform(Y)
    # encode class values as integers
    encoderX0 = LabelEncoder()
    encoderX0.fit(X0)
    X0 = encoderX0.transform(X0)
    # encode class values as integers
    encoderX32 = LabelEncoder()
    encoderX32.fit(X32)
    X32 = encoderX32.transform(X32)
    # Prepend the encoded first column to the numeric block.
    X = numpy.concatenate((X0.reshape((-1, 1)), X), axis=1)
    encoderX31 = LabelEncoder()
    encoderX31.fit(X31)
    X31 = encoderX31.transform(X31)
    #X = numpy.concatenate((X, X31.reshape((-1, 1))), axis=1)
    # X_second keeps the features WITHOUT the X32 column appended below.
    X_second = X
    dummy_y2 = np_utils.to_categorical(X32)
    X = numpy.concatenate((X, X32.reshape((-1, 1))), axis=1)
    # convert integers to dummy variables (i.e. one hot encoded)
    dummy_y = np_utils.to_categorical(encoded_Y)
    # Standardize both feature variants; the scaler is re-fitted in between.
    scaler = StandardScaler()
    scaler.fit(X)
    X = scaler.transform(X)
    scaler.fit(X_second)
    X_second = scaler.transform(X_second)
    return [X_second,X32],[X_second,encoded_Y],[X,encoded_Y]
def shaped_data_regression(dataset):
    """Shape *dataset* like :func:`shaped_data2`, but with one-hot targets.

    Returns three ``[inputs, targets]`` pairs:
      ``[X_second, dummy_y2]`` -- features without X32; one-hot X32 targets
      ``[X_second, dummy_y]``  -- same features; one-hot party targets
      ``[X, dummy_y]``         -- features including X32; one-hot party targets

    NOTE(review): mirrors shaped_data2 except the returned targets are the
    one-hot arrays instead of the integer encodings; the same scaler-reuse
    quirk applies (re-fitted on X_second after X).
    """
    dataset = numpy.array(dataset)
    # Column layout (assumed): [0]=categorical id, [1:-3]=numeric,
    # [-3]=X31, [-2]=X32, [-1]=party label — TODO confirm against g08.
    X = dataset[:,1:-3].astype(float)
    X0 = dataset[:,0]
    X32 = dataset[:,-2]
    X31 = dataset[:,-3]
    Y = dataset[:,-1]
    # encode class values as integers
    encoderY = LabelEncoder()
    encoderY.fit(Y)
    encoded_Y = encoderY.transform(Y)
    # encode class values as integers
    encoderX0 = LabelEncoder()
    encoderX0.fit(X0)
    X0 = encoderX0.transform(X0)
    # encode class values as integers
    encoderX32 = LabelEncoder()
    encoderX32.fit(X32)
    X32 = encoderX32.transform(X32)
    # Prepend the encoded first column to the numeric block.
    X = numpy.concatenate((X0.reshape((-1, 1)), X), axis=1)
    encoderX31 = LabelEncoder()
    encoderX31.fit(X31)
    X31 = encoderX31.transform(X31)
    #X = numpy.concatenate((X, X31.reshape((-1, 1))), axis=1)
    # X_second keeps the features WITHOUT the X32 column appended below.
    X_second = X
    dummy_y2 = np_utils.to_categorical(X32)
    X = numpy.concatenate((X, X32.reshape((-1, 1))), axis=1)
    # convert integers to dummy variables (i.e. one hot encoded)
    dummy_y = np_utils.to_categorical(encoded_Y)
    # Standardize both feature variants; the scaler is re-fitted in between.
    scaler = StandardScaler()
    scaler.fit(X)
    X = scaler.transform(X)
    scaler.fit(X_second)
    X_second = scaler.transform(X_second)
    return [X_second,dummy_y2],[X_second,dummy_y],[X,dummy_y]
def shaped_data_no_bin(dataset):
    """Build a single non-binarized matrix from the raw dataset.

    Row layout of the returned array:
    ``[label-encoded col 0 | numeric cols 1:-2 | label-encoded col -2 |
    index of the party name in g08.PARTIDOS]``.

    The original body also fitted a LabelEncoder on the last column into
    an unused ``encoded_Y`` variable; that dead code is removed here.
    """
    data = numpy.array(dataset)
    numeric = data[:, 1:-2].astype(float)
    # Integer-encode the two categorical columns.
    first = LabelEncoder().fit_transform(data[:, 0])
    second_last = LabelEncoder().fit_transform(data[:, -2])
    # Encode the target with the fixed g08.PARTIDOS ordering so codes are
    # stable across datasets (LabelEncoder depends on the labels seen).
    labels = numpy.array([g08.PARTIDOS.index(name) for name in data[:, -1]])
    X = numpy.concatenate((first.reshape((-1, 1)), numeric), axis=1)
    X = numpy.concatenate((X, second_last.reshape((-1, 1))), axis=1)
    X = numpy.concatenate((X, labels.reshape((-1, 1))), axis=1)
    return X
def shaped_data_no_bin2(dataset):
    """Build three non-binarized matrices with the party index appended.

    Returns ``(X_first, X_second, X)`` where:
      ``X_first``  -- [X0 | numeric | X31 | X32] (no target column)
      ``X_second`` -- [X0 | numeric | X31 | party index]
      ``X``        -- [X0 | numeric | X31 | X32 | party index]

    NOTE(review): ``encoded_Y`` is computed but unused; the target column
    is instead encoded via the fixed ``g08.PARTIDOS`` ordering.
    """
    dataset = numpy.array(dataset)
    # Column layout (assumed): [0]=categorical id, [1:-3]=numeric,
    # [-3]=X31, [-2]=X32, [-1]=party label — TODO confirm against g08.
    X = dataset[:,1:-3].astype(float)
    X0 = dataset[:,0]
    X32 = dataset[:,-2]
    X31 = dataset[:,-3]
    Y = dataset[:,-1]
    # encode class values as integers
    encoderY = LabelEncoder()
    encoderY.fit(Y)
    encoded_Y = encoderY.transform(Y)
    # encode class values as integers
    encoderX0 = LabelEncoder()
    encoderX0.fit(X0)
    X0 = encoderX0.transform(X0)
    # encode class values as integers
    encoderX32 = LabelEncoder()
    encoderX32.fit(X32)
    X32 = encoderX32.transform(X32)
    encoderX31 = LabelEncoder()
    encoderX31.fit(X31)
    X31 = encoderX31.transform(X31)
    # Assemble features; X_second snapshots the matrix before X32 is added.
    X = numpy.concatenate((X0.reshape((-1, 1)), X), axis=1)
    X = numpy.concatenate((X, X31.reshape((-1, 1))), axis=1)
    X_second = X
    X = numpy.concatenate((X, X32.reshape((-1, 1))), axis=1)
    X_first = X
    # Fixed party-name-to-index encoding (stable, unlike LabelEncoder).
    Y = numpy.array([g08.PARTIDOS.index(Y[i]) for i in range(len(Y))])
    X = numpy.concatenate((X, Y.reshape((-1, 1))), axis=1)
    X_second = numpy.concatenate((X_second, Y.reshape((-1, 1))), axis=1)
    return X_first,X_second,X
def shaped_data_kdtrees(dataset):
    """Shape the dataset for KD-tree models, min-max scaling the numerics.

    Returns ``(X_first, X_second, X)`` where:
      ``X_first``  -- [X0 | scaled numeric | X31 | X32]
      ``X_second`` -- [X0 | scaled numeric | X31 | party index]
      ``X``        -- [X0 | scaled numeric | X31 | X32 | party index]

    BUG FIX: the original instantiated ``MinMaxzScaler()`` — a typo that
    raised NameError on every call; ``MinMaxScaler`` is what this module
    imports. A dead, unused LabelEncoder fit on Y was also removed.
    """
    data = numpy.array(dataset)
    numeric = data[:, 1:-3].astype(float)
    # Integer-encode the three categorical columns.
    X0 = LabelEncoder().fit_transform(data[:, 0])
    X32 = LabelEncoder().fit_transform(data[:, -2])
    X31 = LabelEncoder().fit_transform(data[:, -3])
    # Scale only the numeric block into [0, 1], as the original intended.
    scaler = MinMaxScaler()
    numeric = scaler.fit_transform(numeric)
    X = numpy.concatenate((X0.reshape((-1, 1)), numeric), axis=1)
    X = numpy.concatenate((X, X31.reshape((-1, 1))), axis=1)
    # X_second snapshots the matrix before the X32 column is appended.
    X_second = X
    X = numpy.concatenate((X, X32.reshape((-1, 1))), axis=1)
    X_first = X
    # Fixed party-name-to-index encoding (stable across datasets).
    labels = numpy.array([g08.PARTIDOS.index(name) for name in data[:, -1]])
    X = numpy.concatenate((X, labels.reshape((-1, 1))), axis=1)
    X_second = numpy.concatenate((X_second, labels.reshape((-1, 1))), axis=1)
    return X_first, X_second, X
| 21.171875
| 72
| 0.629668
| 906
| 6,775
| 4.634658
| 0.081678
| 0.080019
| 0.045011
| 0.081448
| 0.923791
| 0.916171
| 0.916171
| 0.90974
| 0.908073
| 0.908073
| 0
| 0.062058
| 0.227011
| 6,775
| 319
| 73
| 21.238245
| 0.739736
| 0.127528
| 0
| 0.90184
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03681
| false
| 0
| 0.030675
| 0
| 0.104294
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
11d6bb536d2683d81b85afcd0742e9b8d5fc873e
| 176
|
py
|
Python
|
extensions/.stubs/clrclasses/System/Diagnostics/CodeAnalysis/__init__.py
|
vicwjb/Pycad
|
7391cd694b7a91ad9f9964ec95833c1081bc1f84
|
[
"MIT"
] | 1
|
2020-03-25T03:27:24.000Z
|
2020-03-25T03:27:24.000Z
|
extensions/.stubs/clrclasses/System/Diagnostics/CodeAnalysis/__init__.py
|
vicwjb/Pycad
|
7391cd694b7a91ad9f9964ec95833c1081bc1f84
|
[
"MIT"
] | null | null | null |
extensions/.stubs/clrclasses/System/Diagnostics/CodeAnalysis/__init__.py
|
vicwjb/Pycad
|
7391cd694b7a91ad9f9964ec95833c1081bc1f84
|
[
"MIT"
] | null | null | null |
from __clrclasses__.System.Diagnostics.CodeAnalysis import ExcludeFromCodeCoverageAttribute
from __clrclasses__.System.Diagnostics.CodeAnalysis import SuppressMessageAttribute
| 58.666667
| 91
| 0.920455
| 14
| 176
| 11
| 0.571429
| 0.181818
| 0.25974
| 0.402597
| 0.636364
| 0.636364
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045455
| 176
| 2
| 92
| 88
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
11efa5a88090b76201caac2b671e26ba6c605273
| 114
|
py
|
Python
|
sysopt/blocks/__init__.py
|
andrewjlock/sysopt
|
b9ef17e4532f3a3327afc696ec698eb5dd365350
|
[
"Apache-2.0"
] | null | null | null |
sysopt/blocks/__init__.py
|
andrewjlock/sysopt
|
b9ef17e4532f3a3327afc696ec698eb5dd365350
|
[
"Apache-2.0"
] | null | null | null |
sysopt/blocks/__init__.py
|
andrewjlock/sysopt
|
b9ef17e4532f3a3327afc696ec698eb5dd365350
|
[
"Apache-2.0"
] | 1
|
2022-03-09T03:59:49.000Z
|
2022-03-09T03:59:49.000Z
|
"""Common Blocks and Block factories."""
from sysopt.blocks.common import *
from sysopt.blocks.builders import *
| 22.8
| 40
| 0.763158
| 15
| 114
| 5.8
| 0.6
| 0.229885
| 0.367816
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122807
| 114
| 4
| 41
| 28.5
| 0.87
| 0.298246
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
eedb4fd9db579b478668fde603d95d1b3bc8dc60
| 5,351
|
py
|
Python
|
tests/test_rr.py
|
mpounsett/arke
|
633c53ecca272786465a5b5b905838225b32a137
|
[
"Apache-2.0"
] | null | null | null |
tests/test_rr.py
|
mpounsett/arke
|
633c53ecca272786465a5b5b905838225b32a137
|
[
"Apache-2.0"
] | 7
|
2017-03-18T20:26:50.000Z
|
2017-03-29T22:40:49.000Z
|
tests/test_rr.py
|
mpounsett/arke
|
633c53ecca272786465a5b5b905838225b32a137
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# ------------------------------------------------------------
# Copyright 2017, Matthew Pounsett <matt@conundrum.com>
# ------------------------------------------------------------
from __future__ import unicode_literals
import os
import sys
import unittest
# Make the package under test importable when the tests are run straight
# from a source checkout (prepends the repository root to sys.path).
sys.path.insert(0,
    os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
)
import arke.rr
class TestUnknownTypeMethod(unittest.TestCase):
    """_generate_unknown_type builds an RR subclass for unknown type codes."""

    def test_from_int(self):
        generated = arke.rr._generate_unknown_type(65280)
        self.assertTrue(issubclass(generated, arke.rr.RR))
        self.assertEqual(generated.value, 65280)
        self.assertEqual(generated.mnemonic, 'TYPE65280')
class TestGetTypeValueMethod(unittest.TestCase):
    """get_type_value resolves ints, mnemonics, and RR classes to values."""

    def test_from_int(self):
        self.assertEqual(arke.rr.get_type_value(1), 1)

    def test_from_mnemonic(self):
        for mnemonic, expected in (('A', 1), ('NS', 2), ('CNAME', 5)):
            self.assertEqual(arke.rr.get_type_value(mnemonic), expected)

    def test_from_class(self):
        for rr_class, expected in (
            (arke.rr.A, 1),
            (arke.rr.NS, 2),
            (arke.rr.CNAME, 5),
        ):
            self.assertEqual(arke.rr.get_type_value(rr_class), expected)

    def test_from_unknown(self):
        self.assertEqual(arke.rr.get_type_value('TYPE65280'), 65280)
class TestGetTypeMnemonicMethod(unittest.TestCase):
    """get_type_mnemonic maps ints, mnemonics, and RR classes to mnemonics."""

    def test_from_int(self):
        for value, expected in ((1, 'A'), (65280, 'TYPE65280')):
            self.assertEqual(arke.rr.get_type_mnemonic(value), expected)

    def test_from_mnemonic(self):
        # A mnemonic maps to itself.
        for mnemonic in ('A', 'NS', 'CNAME'):
            self.assertEqual(arke.rr.get_type_mnemonic(mnemonic), mnemonic)

    def test_from_class(self):
        for rr_class, expected in (
            (arke.rr.A, 'A'),
            (arke.rr.NS, 'NS'),
            (arke.rr.CNAME, 'CNAME'),
        ):
            self.assertEqual(arke.rr.get_type_mnemonic(rr_class), expected)

    def test_from_unknown(self):
        self.assertEqual(arke.rr.get_type_mnemonic('TYPE65280'), 'TYPE65280')
class TestUnknownClassMethod(unittest.TestCase):
    """_generate_unknown_class builds a Class subclass for unknown codes."""

    def test_from_int(self):
        generated = arke.rr._generate_unknown_class(65280)
        self.assertTrue(issubclass(generated, arke.rr.Class))
        self.assertEqual(generated.value, 65280)
        self.assertEqual(generated.mnemonic, 'CLASS65280')
        self.assertEqual(generated.long_name, 'CLASS65280')
class TestGetClassValueMethod(unittest.TestCase):
    """get_class_value resolves ints, mnemonics, and classes to values."""

    def test_from_int(self):
        self.assertEqual(arke.rr.get_class_value(1), 1)

    def test_from_mnemonic(self):
        for mnemonic, expected in (('IN', 1), ('CH', 3), ('HS', 4)):
            self.assertEqual(arke.rr.get_class_value(mnemonic), expected)

    def test_from_class(self):
        for rr_class, expected in (
            (arke.rr.IN, 1),
            (arke.rr.CH, 3),
            (arke.rr.HS, 4),
        ):
            self.assertEqual(arke.rr.get_class_value(rr_class), expected)

    def test_from_unknown(self):
        self.assertEqual(arke.rr.get_class_value('CLASS65280'), 65280)
class TestGetClassMnemonicMethod(unittest.TestCase):
    """get_class_mnemonic maps ints, mnemonics, and classes to mnemonics."""

    def test_from_int(self):
        for value, expected in ((1, 'IN'), (65280, 'CLASS65280')):
            self.assertEqual(arke.rr.get_class_mnemonic(value), expected)

    def test_from_mnemonic(self):
        # A mnemonic maps to itself.
        for mnemonic in ('IN', 'CH', 'HS'):
            self.assertEqual(arke.rr.get_class_mnemonic(mnemonic), mnemonic)

    def test_from_class(self):
        for rr_class, expected in (
            (arke.rr.IN, 'IN'),
            (arke.rr.CH, 'CH'),
            (arke.rr.HS, 'HS'),
        ):
            self.assertEqual(arke.rr.get_class_mnemonic(rr_class), expected)

    def test_from_unknown(self):
        self.assertEqual(
            arke.rr.get_class_mnemonic('CLASS65280'),
            'CLASS65280'
        )
class TestGenerateMethods(unittest.TestCase):
    """generate() builds resource records from a type given several ways."""

    # Keyword arguments shared by every generate() call in this class.
    _BASE_KWARGS = {
        'oname': 'www.example.com',
        'rrclass': 'IN',
        'ttl': 200,
    }

    def _check_a_record(self, record):
        # Shared assertions for the two A-record cases below.
        self.assertIsInstance(record, arke.rr.A)
        self.assertEqual(record.value, 1)
        self.assertEqual(record.mnemonic, 'A')
        self.assertEqual(record.ttl, 200)
        self.assertEqual(record.ip, '192.0.2.1')

    def test_generate_from_mnemonic(self):
        record = arke.rr.generate('A', ip='192.0.2.1', **self._BASE_KWARGS)
        self._check_a_record(record)

    def test_generate_from_int(self):
        record = arke.rr.generate(1, ip='192.0.2.1', **self._BASE_KWARGS)
        self._check_a_record(record)

    def test_generate_from_unknown_int(self):
        record = arke.rr.generate(
            65280,
            rdata='more random text',
            **self._BASE_KWARGS
        )
        self.assertIsInstance(record, arke.rr.RR)
        self.assertEqual(record.value, 65280)
        self.assertEqual(record.mnemonic, 'TYPE65280')
        self.assertEqual(record.ttl, 200)
        self.assertEqual(record.rdata, 'more random text')
| 34.522581
| 78
| 0.635208
| 698
| 5,351
| 4.683381
| 0.113181
| 0.104619
| 0.197614
| 0.218415
| 0.808504
| 0.782502
| 0.775772
| 0.749159
| 0.683389
| 0.451514
| 0
| 0.042207
| 0.207438
| 5,351
| 154
| 79
| 34.746753
| 0.728602
| 0.036816
| 0
| 0.40678
| 0
| 0
| 0.057681
| 0
| 0
| 0
| 0
| 0
| 0.474576
| 1
| 0.177966
| false
| 0
| 0.042373
| 0
| 0.279661
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
eedd67eff6b980dc381d7ce2be6d63ba6c0a7f33
| 14,735
|
py
|
Python
|
simpleblog/migrations/0001_initial.py
|
realnoobs/wagtail_simple_blog
|
01b35153f6dd90e9c12234a5aaae8eebe3940f37
|
[
"MIT"
] | null | null | null |
simpleblog/migrations/0001_initial.py
|
realnoobs/wagtail_simple_blog
|
01b35153f6dd90e9c12234a5aaae8eebe3940f37
|
[
"MIT"
] | null | null | null |
simpleblog/migrations/0001_initial.py
|
realnoobs/wagtail_simple_blog
|
01b35153f6dd90e9c12234a5aaae8eebe3940f37
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2.10 on 2021-12-24 06:46
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import modelcluster.contrib.taggit
import modelcluster.fields
import mptt.fields
import simpleblog.blocks
import wagtail.contrib.routable_page.models
import wagtail.contrib.typed_table_block.blocks
import wagtail.core.blocks
import wagtail.core.fields
import wagtail.embeds.blocks
import wagtail.images.blocks
class Migration(migrations.Migration):
    """Initial auto-generated migration for the simpleblog app.

    Creates the Category (MPTT tree), Post, Article, TaggedPost, and Index
    models, plus the tag/thumbnail fields on Post. Auto-generated by Django
    (see header comment); do not hand-edit field definitions.

    NOTE(review): the long StreamField definitions below contain line
    breaks inside help_text strings that look like extraction artifacts —
    verify against the original file before running this migration.
    """

    initial = True

    dependencies = [
        ('taggit', '0003_taggeditem_add_unique_index'),
        ('wagtailimages', '0023_add_choose_permissions'),
        ('wagtailcore', '0066_collection_management_permissions'),
    ]

    operations = [
        # Hierarchical category model backed by django-mptt.
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=80, unique=True, validators=[django.core.validators.MinLengthValidator(3)], verbose_name='Name')),
                ('icon', models.CharField(blank=True, help_text='Icon name', max_length=50, null=True)),
                ('slug', models.SlugField(blank=True, max_length=80, null=True, unique=True)),
                ('description', wagtail.core.fields.RichTextField(blank=True, null=True, verbose_name='Description')),
                ('lft', models.PositiveIntegerField(editable=False)),
                ('rght', models.PositiveIntegerField(editable=False)),
                ('tree_id', models.PositiveIntegerField(db_index=True, editable=False)),
                ('level', models.PositiveIntegerField(editable=False)),
                ('parent', mptt.fields.TreeForeignKey(blank=True, help_text='Categories and Menu Item, unlike tags, they can have a hierarchy. You might have a Jazz Item, and under that have children items for Bebop and Big Band. Totally optional.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='childrens', to='simpleblog.category')),
                ('thumbnail', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.image')),
            ],
            options={
                'verbose_name': 'Category',
                'verbose_name_plural': 'Categories',
            },
        ),
        # Base blog post page, inheriting from wagtailcore.Page.
        migrations.CreateModel(
            name='Post',
            fields=[
                ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.page')),
                ('summary', wagtail.core.fields.RichTextField(blank=True, null=True, verbose_name='Summary')),
                ('custom_template', models.CharField(blank=True, max_length=100, null=True)),
                ('custom_styles', models.TextField(blank=True, null=True)),
                ('custom_scripts', models.TextField(blank=True, null=True)),
                ('show_comments', models.BooleanField(default=True, help_text='Show all comments')),
                ('allow_comments', models.BooleanField(default=True, help_text='Allow visitors to comments')),
                ('view_count', models.IntegerField(default=0, editable=False)),
                ('featured', models.BooleanField(default=False, help_text='Whether this page will appear featured posts list')),
                ('category', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='posts', to='simpleblog.category', verbose_name='category')),
            ],
            options={
                'ordering': ('-first_published_at',),
            },
            bases=('wagtailcore.page',),
        ),
        # Article page: a Post with a rich StreamField body.
        migrations.CreateModel(
            name='Article',
            fields=[
                ('post_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='simpleblog.post')),
                ('contents', wagtail.core.fields.StreamField([('richtext', simpleblog.blocks.RichtextBlock()), ('quote', wagtail.core.blocks.StructBlock([('quote', wagtail.core.blocks.TextBlock(required=True)), ('author', wagtail.core.blocks.CharBlock(required=False)), ('link', wagtail.core.blocks.URLBlock(required=False))])), ('choosen_pages', wagtail.core.blocks.StructBlock([('title', wagtail.core.blocks.CharBlock(required=False)), ('style', wagtail.core.blocks.ChoiceBlock(choices=[('list', 'Page List'), ('card', 'Page Card')])), ('columns', wagtail.core.blocks.IntegerBlock(default=2, max_value=4, min_value=1)), ('show_thumbnail', wagtail.core.blocks.BooleanBlock(default=True, required=False)), ('show_summary', wagtail.core.blocks.BooleanBlock(default=True, required=False)), ('pages', wagtail.core.blocks.ListBlock(child_block=wagtail.core.blocks.PageChooserBlock(page_type=['simpleblog.Post'])))])), ('code', wagtail.core.blocks.StructBlock([('language', wagtail.core.blocks.CharBlock(required=True)), ('filename', wagtail.core.blocks.CharBlock(required=False)), ('caption', wagtail.core.blocks.TextBlock(required=False)), ('code', wagtail.core.blocks.TextBlock(required=True))])), ('gist', wagtail.core.blocks.StructBlock([('id', wagtail.core.blocks.CharBlock(required=True)), ('file', wagtail.core.blocks.CharBlock(help_text='If the gist has multiple files, specify the filename you want to show', required=False)), ('line', wagtail.core.blocks.CharBlock(help_text='Line numbers you want to show. The rest are removed. 1-3 or 1,2,3 or 2-', required=False)), ('highlight_line', wagtail.core.blocks.CharBlock(help_text='Line numbers you want to highlight. 
Uses the same syntax for line ranges as line', required=False)), ('hide_footer', wagtail.core.blocks.BooleanBlock(help_text='Removes the gist footer', required=False)), ('caption', wagtail.core.blocks.TextBlock(help_text='Places a header above the gist with your chosen caption string', required=False))])), ('diagram', wagtail.core.blocks.StructBlock([('title', wagtail.core.blocks.CharBlock(required=True)), ('caption', wagtail.core.blocks.RichTextBlock(required=False)), ('code', wagtail.core.blocks.TextBlock(required=True))])), ('embed', wagtail.core.blocks.StructBlock([('caption', wagtail.core.blocks.CharBlock(required=False)), ('embed', wagtail.embeds.blocks.EmbedBlock(max_height=400, max_width=800))])), ('image_gallery', wagtail.core.blocks.StructBlock([('title', wagtail.core.blocks.CharBlock(required=False)), ('width', wagtail.core.blocks.IntegerBlock(default=185, required=True)), ('height', wagtail.core.blocks.IntegerBlock(default=105, required=True)), ('classnames', wagtail.core.blocks.CharBlock(required=False)), ('images', wagtail.core.blocks.ListBlock(wagtail.core.blocks.StructBlock([('image', wagtail.images.blocks.ImageChooserBlock()), ('caption', wagtail.core.blocks.CharBlock(required=False)), ('classnames', wagtail.core.blocks.CharBlock(required=False))])))])), ('table', simpleblog.blocks.CustomTableBlock(table_options={'autoColumnSize': False, 'colHeaders': False, 'contextMenu': ['row_above', 'row_below', '---------', 'col_left', 'col_right', '---------', 'remove_row', 'remove_col', '---------', 'undo', 'redo'], 'editor': 'text', 'height': 108, 'minSpareRows': 0, 'renderer': 'text', 'rowHeaders': False, 'startCols': 3, 'startRows': 3, 'stretchH': 'all'})), ('table_typed', wagtail.contrib.typed_table_block.blocks.TypedTableBlock([('text', wagtail.core.blocks.CharBlock()), ('numeric', wagtail.core.blocks.FloatBlock()), ('rich_text', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock()), ('country', 
wagtail.core.blocks.ChoiceBlock(choices=[('be', 'Belgium'), ('fr', 'France'), ('de', 'Germany'), ('nl', 'Netherlands'), ('pl', 'Poland'), ('uk', 'United Kingdom')]))], group='Content Blocks'))], blank=True, help_text='Contents', null=True)),
            ],
            options={
                'abstract': False,
            },
            bases=('simpleblog.post',),
        ),
        # Through model connecting Posts to taggit tags.
        migrations.CreateModel(
            name='TaggedPost',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('content_object', modelcluster.fields.ParentalKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='tagged_items', to='simpleblog.post')),
                ('tag', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='post_items', to='taggit.tag')),
            ],
            options={
                'verbose_name': 'Post Tag',
                'verbose_name_plural': 'Post Tags',
            },
        ),
        migrations.AddField(
            model_name='post',
            name='tags',
            field=modelcluster.contrib.taggit.ClusterTaggableManager(blank=True, help_text='A comma-separated list of tags.', through='simpleblog.TaggedPost', to='taggit.Tag', verbose_name='tags'),
        ),
        migrations.AddField(
            model_name='post',
            name='thumbnail',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.image'),
        ),
        # Index page: paginated listing page with the same StreamField body.
        migrations.CreateModel(
            name='Index',
            fields=[
                ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.page')),
                ('summary', wagtail.core.fields.RichTextField(blank=True, null=True, verbose_name='Summary')),
                ('custom_template', models.CharField(blank=True, max_length=100, null=True)),
                ('custom_styles', models.TextField(blank=True, null=True)),
                ('custom_scripts', models.TextField(blank=True, null=True)),
                ('show_comments', models.BooleanField(default=True, help_text='Show all comments')),
                ('allow_comments', models.BooleanField(default=True, help_text='Allow visitors to comments')),
                ('view_count', models.IntegerField(default=0, editable=False)),
                ('post_per_page', models.IntegerField(default=4, help_text='Number of post shown in each page.', validators=[django.core.validators.MinValueValidator(2), django.core.validators.MaxValueValidator(20)])),
                ('contents', wagtail.core.fields.StreamField([('richtext', simpleblog.blocks.RichtextBlock()), ('quote', wagtail.core.blocks.StructBlock([('quote', wagtail.core.blocks.TextBlock(required=True)), ('author', wagtail.core.blocks.CharBlock(required=False)), ('link', wagtail.core.blocks.URLBlock(required=False))])), ('choosen_pages', wagtail.core.blocks.StructBlock([('title', wagtail.core.blocks.CharBlock(required=False)), ('style', wagtail.core.blocks.ChoiceBlock(choices=[('list', 'Page List'), ('card', 'Page Card')])), ('columns', wagtail.core.blocks.IntegerBlock(default=2, max_value=4, min_value=1)), ('show_thumbnail', wagtail.core.blocks.BooleanBlock(default=True, required=False)), ('show_summary', wagtail.core.blocks.BooleanBlock(default=True, required=False)), ('pages', wagtail.core.blocks.ListBlock(child_block=wagtail.core.blocks.PageChooserBlock(page_type=['simpleblog.Post'])))])), ('code', wagtail.core.blocks.StructBlock([('language', wagtail.core.blocks.CharBlock(required=True)), ('filename', wagtail.core.blocks.CharBlock(required=False)), ('caption', wagtail.core.blocks.TextBlock(required=False)), ('code', wagtail.core.blocks.TextBlock(required=True))])), ('gist', wagtail.core.blocks.StructBlock([('id', wagtail.core.blocks.CharBlock(required=True)), ('file', wagtail.core.blocks.CharBlock(help_text='If the gist has multiple files, specify the filename you want to show', required=False)), ('line', wagtail.core.blocks.CharBlock(help_text='Line numbers you want to show. The rest are removed. 1-3 or 1,2,3 or 2-', required=False)), ('highlight_line', wagtail.core.blocks.CharBlock(help_text='Line numbers you want to highlight. 
Uses the same syntax for line ranges as line', required=False)), ('hide_footer', wagtail.core.blocks.BooleanBlock(help_text='Removes the gist footer', required=False)), ('caption', wagtail.core.blocks.TextBlock(help_text='Places a header above the gist with your chosen caption string', required=False))])), ('diagram', wagtail.core.blocks.StructBlock([('title', wagtail.core.blocks.CharBlock(required=True)), ('caption', wagtail.core.blocks.RichTextBlock(required=False)), ('code', wagtail.core.blocks.TextBlock(required=True))])), ('embed', wagtail.core.blocks.StructBlock([('caption', wagtail.core.blocks.CharBlock(required=False)), ('embed', wagtail.embeds.blocks.EmbedBlock(max_height=400, max_width=800))])), ('image_gallery', wagtail.core.blocks.StructBlock([('title', wagtail.core.blocks.CharBlock(required=False)), ('width', wagtail.core.blocks.IntegerBlock(default=185, required=True)), ('height', wagtail.core.blocks.IntegerBlock(default=105, required=True)), ('classnames', wagtail.core.blocks.CharBlock(required=False)), ('images', wagtail.core.blocks.ListBlock(wagtail.core.blocks.StructBlock([('image', wagtail.images.blocks.ImageChooserBlock()), ('caption', wagtail.core.blocks.CharBlock(required=False)), ('classnames', wagtail.core.blocks.CharBlock(required=False))])))])), ('table', simpleblog.blocks.CustomTableBlock(table_options={'autoColumnSize': False, 'colHeaders': False, 'contextMenu': ['row_above', 'row_below', '---------', 'col_left', 'col_right', '---------', 'remove_row', 'remove_col', '---------', 'undo', 'redo'], 'editor': 'text', 'height': 108, 'minSpareRows': 0, 'renderer': 'text', 'rowHeaders': False, 'startCols': 3, 'startRows': 3, 'stretchH': 'all'})), ('table_typed', wagtail.contrib.typed_table_block.blocks.TypedTableBlock([('text', wagtail.core.blocks.CharBlock()), ('numeric', wagtail.core.blocks.FloatBlock()), ('rich_text', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock()), ('country', 
wagtail.core.blocks.ChoiceBlock(choices=[('be', 'Belgium'), ('fr', 'France'), ('de', 'Germany'), ('nl', 'Netherlands'), ('pl', 'Poland'), ('uk', 'United Kingdom')]))], group='Content Blocks'))], blank=True, help_text='Contents', null=True)),
                ('thumbnail', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.image')),
            ],
            options={
                'verbose_name': 'Index',
            },
            bases=(wagtail.contrib.routable_page.models.RoutablePageMixin, 'wagtailcore.page'),
        ),
    ]
| 120.778689
| 3,889
| 0.688225
| 1,695
| 14,735
| 5.887316
| 0.175811
| 0.102515
| 0.148211
| 0.078164
| 0.788255
| 0.781842
| 0.778335
| 0.770518
| 0.770518
| 0.763904
| 0
| 0.007942
| 0.136885
| 14,735
| 121
| 3,890
| 121.77686
| 0.776694
| 0.003122
| 0
| 0.464912
| 1
| 0.026316
| 0.223259
| 0.008034
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.114035
| 0
| 0.149123
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
01036bbc8577b3696a1c73bda624aba19b4f3206
| 215
|
py
|
Python
|
App/views.py
|
Yuchao99/Play
|
6aa6327b997189d3e272f4dc12c56f033e1ba782
|
[
"MIT"
] | null | null | null |
App/views.py
|
Yuchao99/Play
|
6aa6327b997189d3e272f4dc12c56f033e1ba782
|
[
"MIT"
] | null | null | null |
App/views.py
|
Yuchao99/Play
|
6aa6327b997189d3e272f4dc12c56f033e1ba782
|
[
"MIT"
] | null | null | null |
# coding:utf-8
from django.http import HttpResponse
from django.shortcuts import render
def index(request):
    """Render the site landing page."""
    template_name = 'index.html'
    return render(request, template_name)
def playGround(request):
    """Render the playground page."""
    template_name = 'play.html'
    return render(request, template_name)
| 21.5
| 39
| 0.75814
| 29
| 215
| 5.62069
| 0.586207
| 0.122699
| 0.233129
| 0.319018
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005376
| 0.134884
| 215
| 10
| 40
| 21.5
| 0.870968
| 0.055814
| 0
| 0
| 0
| 0
| 0.094059
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
6dcf91e152e2d8e00ec8459db77e2b8de885989a
| 159
|
py
|
Python
|
gd/image/__init__.py
|
nekitdev/gd.py
|
b9d5e29c09f953f54b9b648fb677e987d9a8e103
|
[
"MIT"
] | 58
|
2020-09-30T16:51:22.000Z
|
2022-02-13T17:27:48.000Z
|
gd/image/__init__.py
|
NeKitDS/gd.py
|
b9d5e29c09f953f54b9b648fb677e987d9a8e103
|
[
"MIT"
] | 30
|
2019-07-29T12:03:41.000Z
|
2020-09-15T17:01:37.000Z
|
gd/image/__init__.py
|
NeKitDS/gd.py
|
b9d5e29c09f953f54b9b648fb677e987d9a8e103
|
[
"MIT"
] | 20
|
2019-12-06T03:16:57.000Z
|
2020-09-16T17:45:27.000Z
|
from gd.image.geometry import *
from gd.image.icon_factory import *
from gd.image.metadata import *
from gd.image.sheet import *
from gd.image.sprite import *
| 26.5
| 35
| 0.779874
| 26
| 159
| 4.730769
| 0.384615
| 0.243902
| 0.447154
| 0.552846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125786
| 159
| 5
| 36
| 31.8
| 0.884892
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
0965fb73885939eb4475e64338884040c29e1d2c
| 7,993
|
py
|
Python
|
tests/test_connect.py
|
quantxt/qtcurate-sdk-python
|
7c60a97c808381680889b2934aa5146e9303274e
|
[
"Apache-2.0"
] | null | null | null |
tests/test_connect.py
|
quantxt/qtcurate-sdk-python
|
7c60a97c808381680889b2934aa5146e9303274e
|
[
"Apache-2.0"
] | null | null | null |
tests/test_connect.py
|
quantxt/qtcurate-sdk-python
|
7c60a97c808381680889b2934aa5146e9303274e
|
[
"Apache-2.0"
] | null | null | null |
from unittest import TestCase, main
from unittest.mock import patch, Mock
from qtcurate.exceptions import *
from qtcurate.connect import connect
import requests
from collections import namedtuple
class TestUtilities(TestCase):
    """Tests for the qtcurate.connect.connect helper.

    Every HTTP verb / data_type combination is exercised for three outcomes:
    invalid-argument errors raised before any request is made
    (QtArgumentError), connection failures (QtConnectionError), and non-2xx
    API replies (QtRestApiError).  requests.Session is patched throughout,
    so no real network traffic occurs.
    """

    # --- shared helpers ---------------------------------------------------

    def _assert_conn_err(self, session, verb, **kwargs):
        # Make the mocked Session's verb raise ConnectionError and expect
        # connect() to translate it into QtConnectionError.
        mocked = Mock()
        getattr(mocked, verb).side_effect = requests.exceptions.ConnectionError("Connection error")
        session.return_value = mocked
        with self.assertRaises(QtConnectionError):
            connect(method=verb, uri='some_uri', headers='some_headers', **kwargs)

    def _assert_rest_api_err(self, session, verb, **kwargs):
        # A 401 response from the mocked Session must surface as QtRestApiError.
        response = Mock()
        response.status_code = 401
        getattr(session.return_value, verb).return_value = response
        with self.assertRaises(QtRestApiError):
            connect(method=verb, uri='some_uri', headers='some_headers', **kwargs)

    def _assert_response_returned(self, session, verb, **kwargs):
        # A 200 response must be handed back to the caller unchanged.
        response = Mock()
        response.status_code = 200
        getattr(session.return_value, verb).return_value = response
        result = connect(method=verb, uri='some_uri', headers='some_headers', **kwargs)
        self.assertEqual(response, result)

    # --- argument validation ----------------------------------------------

    def test_connect_data_type_arg_err(self):
        # An unknown data_type is rejected before any request is attempted.
        with self.assertRaises(QtArgumentError):
            connect(method='some_method', uri='some_uri', headers='some_headers', data_type='123')

    def test_connect_method_put_arg_err(self):
        # data_type="params" is not valid for PUT requests.
        with self.assertRaises(QtArgumentError):
            connect(method='put', uri='some_uri', headers='some_headers', data_type="params")

    # --- GET ----------------------------------------------------------------

    @patch("qtcurate.connect.requests.Session")
    def test_connect_method_get_data_type_none_conn_err(self, session):
        self._assert_conn_err(session, 'get')

    @patch("qtcurate.connect.requests.Session")
    def test_connect_method_get_data_type_none_qt_rest_api_err(self, session):
        self._assert_rest_api_err(session, 'get')

    @patch("qtcurate.connect.requests.Session")
    def test_connect_method_get_data_type_params_conn_err(self, session):
        self._assert_conn_err(session, 'get', data_type="params")

    @patch("qtcurate.connect.requests.Session")
    def test_connect_method_get_data_type_params_qt_rest_api_err(self, session):
        # Fixed: this test previously omitted data_type="params" and merely
        # duplicated the data_type=None variant above, leaving the params
        # error path untested.
        self._assert_rest_api_err(session, 'get', data_type="params")

    # --- DELETE -------------------------------------------------------------

    @patch("qtcurate.connect.requests.Session")
    def test_connect_method_delete_conn_err(self, session):
        self._assert_conn_err(session, 'delete')

    @patch("qtcurate.connect.requests.Session")
    def test_connect_method_delete_qt_rest_api_err(self, session):
        self._assert_rest_api_err(session, 'delete')

    # --- POST ---------------------------------------------------------------

    @patch("qtcurate.connect.requests.Session")
    def test_connect_method_post_data_type_data_conn_err(self, session):
        self._assert_conn_err(session, 'post', data_type="data")

    @patch("qtcurate.connect.requests.Session")
    def test_connect_method_post_data_type_data_qt_rest_api_err(self, session):
        self._assert_rest_api_err(session, 'post', data_type="data")

    @patch("qtcurate.connect.requests.Session")
    def test_connect_method_post_data_type_files_conn_err(self, session):
        self._assert_conn_err(session, 'post', data_type="files")

    @patch("qtcurate.connect.requests.Session")
    def test_connect_method_post_data_type_files_qt_rest_api_err(self, session):
        self._assert_rest_api_err(session, 'post', data_type="files")

    # --- PUT ----------------------------------------------------------------

    @patch("qtcurate.connect.requests.Session")
    def test_connect_method_put_data_type_data_conn_err(self, session):
        self._assert_conn_err(session, 'put', data_type="data")

    @patch("qtcurate.connect.requests.Session")
    def test_connect_method_put_data_type_data_qt_rest_api_err(self, session):
        self._assert_rest_api_err(session, 'put', data_type="data")

    @patch("qtcurate.connect.requests.Session")
    def test_connect_method_put_data_type_files_conn_err(self, session):
        self._assert_conn_err(session, 'put', data_type="files")

    @patch("qtcurate.connect.requests.Session")
    def test_connect_method_put_data_type_files_qt_rest_api_err(self, session):
        self._assert_rest_api_err(session, 'put', data_type="files")

    # --- success paths ------------------------------------------------------

    @patch("qtcurate.connect.requests.Session")
    def test_connect_equal_get(self, session):
        self._assert_response_returned(session, 'get')

    @patch("qtcurate.connect.requests.Session")
    def test_connect_method_get_data_type_params(self, session):
        self._assert_response_returned(session, 'get', data_type="params")

    @patch("qtcurate.connect.requests.Session")
    def test_connect_method_delete(self, session):
        self._assert_response_returned(session, 'delete')

    @patch("qtcurate.connect.requests.Session")
    def test_connect_method_post_data_type_data(self, session):
        self._assert_response_returned(session, 'post', data_type="data")

    @patch("qtcurate.connect.requests.Session")
    def test_connect_method_put_data_type_data(self, session):
        self._assert_response_returned(session, 'put', data_type="data")
# Allow running this test module directly: `python tests/test_connect.py`.
if __name__ == '__main__':
    main()
| 41.201031
| 98
| 0.704116
| 949
| 7,993
| 5.646997
| 0.066386
| 0.097033
| 0.054861
| 0.066617
| 0.948311
| 0.944019
| 0.940661
| 0.920881
| 0.914163
| 0.881508
| 0
| 0.005997
| 0.186413
| 7,993
| 193
| 99
| 41.414508
| 0.818084
| 0.003003
| 0
| 0.736486
| 0
| 0
| 0.165307
| 0.0787
| 0
| 0
| 0
| 0
| 0.141892
| 1
| 0.141892
| false
| 0
| 0.040541
| 0
| 0.189189
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
098bc979c0c8c1f6be13c267204a615e22ff172f
| 30,860
|
py
|
Python
|
tests/test_indexer.py
|
abrookins/redis-sitesearch
|
2ebeedbd569801c61a4815f386d83cd18486de58
|
[
"MIT"
] | 23
|
2020-10-22T05:03:51.000Z
|
2022-01-17T23:25:25.000Z
|
tests/test_indexer.py
|
abrookins/redis-sitesearch
|
2ebeedbd569801c61a4815f386d83cd18486de58
|
[
"MIT"
] | 3
|
2021-07-16T18:28:55.000Z
|
2021-10-19T04:56:36.000Z
|
tests/test_indexer.py
|
abrookins/redis-sitesearch
|
2ebeedbd569801c61a4815f386d83cd18486de58
|
[
"MIT"
] | 9
|
2020-12-10T17:26:15.000Z
|
2022-02-11T09:03:46.000Z
|
import os
from unittest import mock
from unittest.mock import call
import pytest
from sitesearch.keys import Keys
from sitesearch.sites.redis_labs import OLD_DOCS_PROD
from sitesearch.errors import ParseError
from sitesearch.indexer import DocumentParser, Indexer, md5, SECTION_ID, PAGE_ID, page_id, section_id
from sitesearch.models import SearchDocument
# Directory containing the HTML fixture documents used by these tests.
DOCS_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                        "documents")
# Fixture filenames, each exercising one specific parsing scenario.
FILE_WITH_SECTIONS = "page_with_sections.html"
FILE_WITHOUT_BREADCRUMBS = "page_without_breadcrumbs.html"
FILE_WITHOUT_TITLE = "page_without_title.html"
FILE_WITHOUT_LINK = "page_without_link.html"
FILE_RELEASE_NOTES = "release_notes.html"
FILE_WITH_AN_INDEX = "setup_and_editing.html"
FILE_WITH_H3s = "file_with_h3s.html"
# Canonical URL attributed to every parsed fixture page.
TEST_URL = f"{OLD_DOCS_PROD.url}/test"
@pytest.fixture()
def indexer(app_config):
    """Yield an Indexer whose search client is a MagicMock, so no real
    RediSearch connection is needed and all calls can be inspected."""
    fake_search_client = mock.MagicMock()
    yield Indexer(OLD_DOCS_PROD, app_config, fake_search_client)
@pytest.fixture()
def keys(app_config):
    """Yield a Keys helper built from the configured key prefix."""
    configured_prefix = app_config.key_prefix
    yield Keys(prefix=configured_prefix)
@pytest.fixture()
def parse_file(site):
    """
    This fixture parses a file with DocumentParser.

    The fixture is a callable that takes the filename of a document
    and returns the SearchDocuments parsed from the HTML in the file.
    """
    def _parse(filename):
        fixture_path = os.path.join(DOCS_DIR, filename)
        with open(fixture_path, encoding='utf-8') as handle:
            markup = handle.read()
        return DocumentParser(site).parse(TEST_URL, markup)
    return _parse
@pytest.fixture()
def index_file(indexer, parse_file):
    """
    This fixture indexes a file using a RediSearch mock -- so that
    we only record the calls made to the client.

    After indexing the document, the fixture returns the Indexer
    object used, so that tests can introspect it.
    """
    def _index(filename):
        parsed_docs = parse_file(filename)
        for document in parsed_docs:
            indexer.index_document(document)
        return indexer
    return _index
def test_indexer_indexes_page_document(index_file, keys, site):
    """Test indexing pages.

    Verifies that indexing the sectioned fixture page records exactly the
    expected page-level document mapping in the mocked search client.

    NOTE: If this test fails, it may be that you changed the fixture HTML.
    The test failure will tell you what the new document ID is, which will
    include a new hash if the content in the fixture changed. Just copy
    the new doc_id value into the test and run the test again.
    """
    indexer = index_file(FILE_WITH_SECTIONS)
    # Expected page-level document; doc_id embeds an md5 of the page
    # content, so it changes whenever the fixture HTML changes.
    expected_doc = {
        'doc_id': f'{TEST_URL}:page:92958f6633a6e3e56b869629ccc5d92b',
        'title': 'Database Persistence with Redis Enterprise Software',
        'section_title': '',
        'hierarchy': '[]',
        'url': TEST_URL,
        's': 'test',
        'body': 'All data is stored and managed exclusively in either RAM or RAM + Flash Memory (Redis on Flash) and therefore, is at risk of being lost upon a\xa0process or server failure.\xa0As Redis Enterprise Software is not just a caching solution, but also a full-fledged database, persistence to disk is critical. Therefore, Redis Enterprise Software supports persisting data to disk on a per-database basis and in multiple ways. There are two options for persistence: Append Only File (AOF) - A continuous writing of data to disk Snapshot (RDB) - An automatic periodic snapshot writing to disk Data persistence, via either mechanism, is used solely to rehydrate the database if the database process fails for any reason. It is not a replacement for backups, but something you do in addition to backups. To disable data persistence, select None. AOF writes the latest ‘write’ commands into a file every second, it resembles a traditional RDBMS’s redo log, if you are familiar with that. This file can later be ‘replayed’ in order to recover from a crash. A snapshot (RDB) on the other hand, is performed every one, six, or twelve hours. The snapshot is a dump of the data and while there is a potential of losing up to one hour of data, it is dramatically faster to recover from a snapshot compared to AOF recovery. Persistence can be configured either at time of database creation or by editing an existing database’s configuration. While the persistence model can be changed dynamically, just know that it can take time for your database to switch from one persistence model to the other. It depends on what you are switching from and to, but also on the size of your database. Note: For performance reasons, if you are going to be using AOF, it is highly recommended to make sure replication is enabled for that database as well. When these two features are enabled, persistence is performed\xa0on the database slave and does not impact performance on the master. 
Options for configuring data persistence There are six\xa0options for persistence in Redis Enterprise Software: Options Description None Data is not persisted to disk at all. Append Only File (AoF) on every write Data is fsynced to disk with every write. Append Only File (AoF) one second Data is fsynced to disk every second. Snapshot every 1 hour A snapshot of the database is created every hour. Snapshot every 6 hours A snapshot of the database is created every 6 hours. Snapshot every 12 hours A snapshot of the database is created every 12 hours. The first thing you need to do is determine if you even need persistence. Persistence is used to recover from a catastrophic failure, so make sure that you need to incur the overhead of persistence before you select it. If the database is being used as a cache, then you may not need persistence. If you do need persistence, then you need to identify\xa0which is the best type for your use case. Append only file (AOF) vs snapshot (RDB) Now that you know the available options, to assist in making a decision on which option is right for your use case, here is a table about the two: Append Only File (AOF) Snapshot (RDB) More resource intensive Less resource\xa0intensive Provides better durability (recover the latest point in time) Less durable Slower time to recover (Larger files) Faster recovery time More disk space required (files tend to grow large and require compaction) Requires less resource (I/O once every several hours and no compaction required) Data persistence and Redis on Flash with Active\\-Active active\\-active If you are enabling data persistence for databases running on Redis Enterprise Flash, by default both master and slave shards are configured to write to disk. This is unlike a standard Redis Enterprise Software database where only the slave shards persist to disk. This master and slave dual data persistence with replication is done to better protect the database against node failures. 
Flash-based databases are expected to hold larger datasets and repair times for shards can be longer under node failures. Having dual-persistence provides better protection against failures under these longer repair times. However, the dual data persistence with replication adds some processor and network overhead, especially in the case of cloud configurations with persistent storage that is network attached (e.g. EBS-backed volumes in AWS). There may be times where performance is critical for your use case and you don’t want to risk data persistence adding latency. If that is the case, you can disable data-persistence on the master shards using the following\xa0rladmin command: rladmin tune db db: master_persistence disabled Page Contents Options for configuring data persistence Append only file (AOF) vs snapshot (RDB) Data persistence and Redis on Flash',
        'type': 'page',
        'position': 0,
        '__score': 1.0
    }
    key = keys.document(site.url, expected_doc['doc_id'])
    # The page document is the first hset call recorded by the mock client.
    call = indexer.search_client.redis.hset.call_args_list[0]
    assert call[0][0] == key
    assert expected_doc == call[1]['mapping']
def test_indexer_indexes_page_section_documents(index_file, keys, site):
    """
    Test indexing page sections.

    Verifies that each section of the sectioned fixture page is indexed as
    its own document (after the page-level document) with the expected
    mapping.

    NOTE: If this test fails, it may be that you changed the fixture HTML.
    The test failure will tell you what the new document ID is, which will
    include a new hash if the content in the fixture changed. Just copy
    the new doc_id value into the test and run the test again.
    """
    indexer = index_file(FILE_WITH_SECTIONS)
    # One expected mapping per section, in document order; each doc_id embeds
    # an md5 of the section content.
    expected_section_docs = [{
        'doc_id': f'{TEST_URL}:section:2e1ea4c2dfa16d70a0254f4b00520687',
        'title': 'Database Persistence with Redis Enterprise Software',
        'section_title': 'Options for configuring data persistence',
        'hierarchy': '[]',
        'url': TEST_URL,
        's': 'test',
        'body':
        'There are six\xa0options for persistence in Redis Enterprise Software: Options Description None Data is not persisted to disk at all. Append Only File (AoF) on every write Data is fsynced to disk with every write. Append Only File (AoF) one second Data is fsynced to disk every second. Snapshot every 1 hour A snapshot of the database is created every hour. Snapshot every 6 hours A snapshot of the database is created every 6 hours. Snapshot every 12 hours A snapshot of the database is created every 12 hours. The first thing you need to do is determine if you even need persistence. Persistence is used to recover from a catastrophic failure, so make sure that you need to incur the overhead of persistence before you select it. If the database is being used as a cache, then you may not need persistence. If you do need persistence, then you need to identify\xa0which is the best type for your use case.',
        'type': 'section',
        'position': 0,
        '__score': 0.75,
    }, {
        'doc_id': f'{TEST_URL}:section:68139f8fbbf3f37c1c1a2c0f94ad90f1',
        'title': 'Database Persistence with Redis Enterprise Software',
        'section_title': 'Append only file (AOF) vs snapshot (RDB)',
        'hierarchy': '[]',
        'url': TEST_URL,
        's': 'test',
        'body':
        'Now that you know the available options, to assist in making a decision on which option is right for your use case, here is a table about the two: Append Only File (AOF) Snapshot (RDB) More resource intensive Less resource\xa0intensive Provides better durability (recover the latest point in time) Less durable Slower time to recover (Larger files) Faster recovery time More disk space required (files tend to grow large and require compaction) Requires less resource (I/O once every several hours and no compaction required)',
        'type': 'section',
        'position': 1,
        '__score': 0.75,
    }, {
        'doc_id': f'{TEST_URL}:section:948035a214a48ca06ef8e97c392c92b8',
        'title': 'Database Persistence with Redis Enterprise Software',
        'section_title': 'Data persistence and Redis on Flash with Active\\-Active',
        's': 'test',
        'hierarchy': '[]',
        'url': TEST_URL,
        'body':
        'active\\-active If you are enabling data persistence for databases running on Redis Enterprise Flash, by default both master and slave shards are configured to write to disk. This is unlike a standard Redis Enterprise Software database where only the slave shards persist to disk. This master and slave dual data persistence with replication is done to better protect the database against node failures. Flash-based databases are expected to hold larger datasets and repair times for shards can be longer under node failures. Having dual-persistence provides better protection against failures under these longer repair times. However, the dual data persistence with replication adds some processor and network overhead, especially in the case of cloud configurations with persistent storage that is network attached (e.g. EBS-backed volumes in AWS). There may be times where performance is critical for your use case and you don’t want to risk data persistence adding latency. If that is the case, you can disable data-persistence on the master shards using the following\xa0rladmin command: rladmin tune db db: master_persistence disabled',
        'type': 'section',
        'position': 2,
        '__score': 0.75
    }]
    # Ignore the first call, which is for the page. In this test,
    # we're focused on the section documents
    for i, doc in enumerate(expected_section_docs, start=1):
        key = keys.document(site.url, doc['doc_id'])
        call = indexer.search_client.redis.hset.call_args_list[i]
        assert call[0][0] == key
        assert doc == call[1]['mapping']
def test_document_parser_skips_pages_without_title(parse_file):
    """A fixture page lacking a title must raise ParseError when parsed."""
    with pytest.raises(ParseError) as excinfo:
        parse_file(FILE_WITHOUT_TITLE)
    assert excinfo.type is ParseError
def test_document_parser_skips_release_notes(parse_file):
    """Release-notes pages must be rejected with ParseError, not indexed."""
    with pytest.raises(ParseError) as excinfo:
        parse_file(FILE_RELEASE_NOTES)
    assert excinfo.type is ParseError
def test_parsing_page_with_links_in_h2s_returns_body_content(parse_file):
    """A regression test: parsing a page whose headings contain links must
    still produce a non-None body on every resulting document."""
    parsed_docs = parse_file(FILE_WITH_AN_INDEX)
    assert all(document.body is not None for document in parsed_docs)
def test_build_hierarchy(indexer):
    """build_hierarchy should resolve each ancestor URL recorded in
    seen_urls to its title, in root-to-leaf order."""
    indexer.seen_urls = {
        "https://docs.redislabs.com/latest/1": "One",
        "https://docs.redislabs.com/latest/1/2": "Two",
        "https://docs.redislabs.com/latest/1/2/3": "Three",
    }
    document = SearchDocument(
        doc_id="123",
        title="Title",
        section_title="Section",
        hierarchy=[],
        s="",
        url="https://docs.redislabs.com/latest/1/2/3/",
        body="This is the body",
        type='page',
        position=0,
    )
    expected = ['One', 'Two', 'Three']
    assert indexer.build_hierarchy(document) == expected
def test_indexer_indexes_sections_from_h3s(index_file, keys, site):
    """Indexing a page with h3 headings produces one section document per h3.

    Fix: removed a leftover debug ``print`` of each expected body that was
    left in the assertion loop.

    The first ``hset`` call indexes the page document itself, so the section
    documents are compared against the calls starting at index 1.
    """
    indexer = index_file(FILE_WITH_H3s)
    expected_section_docs = [{
        'doc_id': f'{TEST_URL}:section:b865fa879111e330b736d9be3e196048',
        'title': 'RedisBloom Tutorial',
        'section_title': '',
        'hierarchy': '[]',
        'url': 'https://docs.redislabs.com/latest//test',
        'body': """Follow this link to register and subscribe to Redis Enterprise Cloud Step 2. Create a database with RedisBloom Module # Step 3. Connect to a database # Follow this link to know how to connect to a database Step 4. Getting Started with RedisBloom # In the next steps you will use some basic RedisBloom commands. You can run them from the Redis command-line interface (redis\\-cli) or use the CLI available in RedisInsight. (See part 2 of this tutorial to learn more about using the RedisInsight CLI.) To interact with RedisBloom, you use the BF.ADD and BF.EXISTS commands. Let’s go ahead and test drive some RedisBloom-specific operations. We will create a basic dataset based on unique visitors’ IP addresses, and you will see how to: Create a Bloom filter Determine whether or not an item exists in the Bloom filter Add one or more items to the Bloom filter Determine whether or not a unique visitor’s IP address exists Let’s walk through the process step-by-step: Create a Bloom filter # Use the BF.ADD command to add a unique visitor IP address to the Bloom filter as shown here: >> BF.ADD unique_visitors 10.94.214.120 (integer) 1 (1.75s) Copy Determine whether or not an item exists # Use the BF.EXISTS command to determine whether or not an item may exist in the Bloom filter: >> BF.EXISTS unique_visitors 10.94.214.120 (integer) 1 Copy >> BF.EXISTS unique_visitors 10.94.214.121 (integer) 0 (1.46s) Copy In the above example, the first command shows the result as “1”, indicating that the item may exist, whereas the second command displays "0", indicating that the item certainly may not exist. Add one or more items to the Bloom filter # Use the BF.MADD command to add one or more items to the Bloom filter, creating the filter if it does not yet exist. 
This command operates identically to BF.ADD, except it allows multiple inputs and returns multiple values: >> BF.MADD unique_visitors 10.94.214.100 10.94.214.200 10.94.214.210 10.94.214.212 1) (integer) 1 2) (integer) 1 3) (integer) 1 4) (integer) 1 Copy As shown above, the BF.MADD allows you to add one or more visitors’ IP addresses to the Bloom filter. Determine whether or not a unique visitor’s IP address exists # Use BF.MEXISTS to determine if one or more items may exist in the filter or not: >> BF.MEXISTS unique_visitors 10.94.214.200 10.94.214.212 1) (integer) 1 2) (integer) 1 Copy >> BF.MEXISTS unique_visitors 10.94.214.200 10.94.214.213 1) (integer) 1 2) (integer) 0 Copy In the above example, the first command shows the result as “1” for both the visitors’ IP addresses, indicating that these items do exist. The second command displays "0" for one of the visitor’s IP addresses, indicating that the item certainly does not exist. Next Step # Learn more about RedisBloom in the Quick Start tutorial.""",
        'type': 'section',
        's': 'test',
        'position': 0,
        '__score': 0.75
    }, {
        'doc_id': f'{TEST_URL}:section:23d7ae247e11c7851ce08896040e0922',
        'title': 'RedisBloom Tutorial',
        'section_title': '',
        'hierarchy': '[]',
        'url': 'https://docs.redislabs.com/latest//test',
        'body': """Step 3. Connect to a database # Follow this link to know how to connect to a database Step 4. Getting Started with RedisBloom # In the next steps you will use some basic RedisBloom commands. You can run them from the Redis command-line interface (redis\\-cli) or use the CLI available in RedisInsight. (See part 2 of this tutorial to learn more about using the RedisInsight CLI.) To interact with RedisBloom, you use the BF.ADD and BF.EXISTS commands. Let’s go ahead and test drive some RedisBloom-specific operations. We will create a basic dataset based on unique visitors’ IP addresses, and you will see how to: Create a Bloom filter Determine whether or not an item exists in the Bloom filter Add one or more items to the Bloom filter Determine whether or not a unique visitor’s IP address exists Let’s walk through the process step-by-step: Create a Bloom filter # Use the BF.ADD command to add a unique visitor IP address to the Bloom filter as shown here: >> BF.ADD unique_visitors 10.94.214.120 (integer) 1 (1.75s) Copy Determine whether or not an item exists # Use the BF.EXISTS command to determine whether or not an item may exist in the Bloom filter: >> BF.EXISTS unique_visitors 10.94.214.120 (integer) 1 Copy >> BF.EXISTS unique_visitors 10.94.214.121 (integer) 0 (1.46s) Copy In the above example, the first command shows the result as “1”, indicating that the item may exist, whereas the second command displays "0", indicating that the item certainly may not exist. Add one or more items to the Bloom filter # Use the BF.MADD command to add one or more items to the Bloom filter, creating the filter if it does not yet exist. 
This command operates identically to BF.ADD, except it allows multiple inputs and returns multiple values: >> BF.MADD unique_visitors 10.94.214.100 10.94.214.200 10.94.214.210 10.94.214.212 1) (integer) 1 2) (integer) 1 3) (integer) 1 4) (integer) 1 Copy As shown above, the BF.MADD allows you to add one or more visitors’ IP addresses to the Bloom filter. Determine whether or not a unique visitor’s IP address exists # Use BF.MEXISTS to determine if one or more items may exist in the filter or not: >> BF.MEXISTS unique_visitors 10.94.214.200 10.94.214.212 1) (integer) 1 2) (integer) 1 Copy >> BF.MEXISTS unique_visitors 10.94.214.200 10.94.214.213 1) (integer) 1 2) (integer) 0 Copy In the above example, the first command shows the result as “1” for both the visitors’ IP addresses, indicating that these items do exist. The second command displays "0" for one of the visitor’s IP addresses, indicating that the item certainly does not exist. Next Step # Learn more about RedisBloom in the Quick Start tutorial.""",
        'type': 'section',
        's': 'test',
        'position': 1,
        '__score': 0.75
    }, {
        'doc_id': f'{TEST_URL}:section:1d42fac7043abe3c7e7debbb7dd14983',
        'title': 'RedisBloom Tutorial',
        'section_title': '',
        'hierarchy': '[]',
        'url': 'https://docs.redislabs.com/latest//test',
        'body': """Follow this link to know how to connect to a database Step 4. Getting Started with RedisBloom # In the next steps you will use some basic RedisBloom commands. You can run them from the Redis command-line interface (redis\\-cli) or use the CLI available in RedisInsight. (See part 2 of this tutorial to learn more about using the RedisInsight CLI.) To interact with RedisBloom, you use the BF.ADD and BF.EXISTS commands. Let’s go ahead and test drive some RedisBloom-specific operations. We will create a basic dataset based on unique visitors’ IP addresses, and you will see how to: Create a Bloom filter Determine whether or not an item exists in the Bloom filter Add one or more items to the Bloom filter Determine whether or not a unique visitor’s IP address exists Let’s walk through the process step-by-step: Create a Bloom filter # Use the BF.ADD command to add a unique visitor IP address to the Bloom filter as shown here: >> BF.ADD unique_visitors 10.94.214.120 (integer) 1 (1.75s) Copy Determine whether or not an item exists # Use the BF.EXISTS command to determine whether or not an item may exist in the Bloom filter: >> BF.EXISTS unique_visitors 10.94.214.120 (integer) 1 Copy >> BF.EXISTS unique_visitors 10.94.214.121 (integer) 0 (1.46s) Copy In the above example, the first command shows the result as “1”, indicating that the item may exist, whereas the second command displays "0", indicating that the item certainly may not exist. Add one or more items to the Bloom filter # Use the BF.MADD command to add one or more items to the Bloom filter, creating the filter if it does not yet exist. This command operates identically to BF.ADD, except it allows multiple inputs and returns multiple values: >> BF.MADD unique_visitors 10.94.214.100 10.94.214.200 10.94.214.210 10.94.214.212 1) (integer) 1 2) (integer) 1 3) (integer) 1 4) (integer) 1 Copy As shown above, the BF.MADD allows you to add one or more visitors’ IP addresses to the Bloom filter. 
Determine whether or not a unique visitor’s IP address exists # Use BF.MEXISTS to determine if one or more items may exist in the filter or not: >> BF.MEXISTS unique_visitors 10.94.214.200 10.94.214.212 1) (integer) 1 2) (integer) 1 Copy >> BF.MEXISTS unique_visitors 10.94.214.200 10.94.214.213 1) (integer) 1 2) (integer) 0 Copy In the above example, the first command shows the result as “1” for both the visitors’ IP addresses, indicating that these items do exist. The second command displays "0" for one of the visitor’s IP addresses, indicating that the item certainly does not exist. Next Step # Learn more about RedisBloom in the Quick Start tutorial.""",
        'type': 'section',
        's': 'test',
        'position': 2,
        '__score': 0.75
    }, {
        'doc_id': f'{TEST_URL}:section:b676e4c351aafb9e89e71a391e9c6209',
        'title': 'RedisBloom Tutorial',
        'section_title': '',
        'hierarchy': '[]',
        'url': 'https://docs.redislabs.com/latest//test',
        'body': """In the next steps you will use some basic RedisBloom commands. You can run them from the Redis command-line interface (redis\\-cli) or use the CLI available in RedisInsight. (See part 2 of this tutorial to learn more about using the RedisInsight CLI.) To interact with RedisBloom, you use the BF.ADD and BF.EXISTS commands. Let’s go ahead and test drive some RedisBloom-specific operations. We will create a basic dataset based on unique visitors’ IP addresses, and you will see how to: Create a Bloom filter Determine whether or not an item exists in the Bloom filter Add one or more items to the Bloom filter Determine whether or not a unique visitor’s IP address exists Let’s walk through the process step-by-step: Create a Bloom filter # Use the BF.ADD command to add a unique visitor IP address to the Bloom filter as shown here: >> BF.ADD unique_visitors 10.94.214.120 (integer) 1 (1.75s) Copy Determine whether or not an item exists # Use the BF.EXISTS command to determine whether or not an item may exist in the Bloom filter: >> BF.EXISTS unique_visitors 10.94.214.120 (integer) 1 Copy >> BF.EXISTS unique_visitors 10.94.214.121 (integer) 0 (1.46s) Copy In the above example, the first command shows the result as “1”, indicating that the item may exist, whereas the second command displays "0", indicating that the item certainly may not exist. Add one or more items to the Bloom filter # Use the BF.MADD command to add one or more items to the Bloom filter, creating the filter if it does not yet exist. This command operates identically to BF.ADD, except it allows multiple inputs and returns multiple values: >> BF.MADD unique_visitors 10.94.214.100 10.94.214.200 10.94.214.210 10.94.214.212 1) (integer) 1 2) (integer) 1 3) (integer) 1 4) (integer) 1 Copy As shown above, the BF.MADD allows you to add one or more visitors’ IP addresses to the Bloom filter. 
Determine whether or not a unique visitor’s IP address exists # Use BF.MEXISTS to determine if one or more items may exist in the filter or not: >> BF.MEXISTS unique_visitors 10.94.214.200 10.94.214.212 1) (integer) 1 2) (integer) 1 Copy >> BF.MEXISTS unique_visitors 10.94.214.200 10.94.214.213 1) (integer) 1 2) (integer) 0 Copy In the above example, the first command shows the result as “1” for both the visitors’ IP addresses, indicating that these items do exist. The second command displays "0" for one of the visitor’s IP addresses, indicating that the item certainly does not exist. Next Step # Learn more about RedisBloom in the Quick Start tutorial.""",
        'type': 'section',
        's': 'test',
        'position': 3,
        '__score': 0.75
    }, {
        'doc_id': f'{TEST_URL}:section:a1854eb001bc4bd83e4edef0dad86c71',
        'title': 'RedisBloom Tutorial',
        'section_title': '',
        'hierarchy': '[]',
        'url': 'https://docs.redislabs.com/latest//test',
        'body': 'Learn more about RedisBloom in the Quick Start tutorial.',
        'type': 'section',
        's': 'test',
        'position': 4,
        '__score': 0.75
    }]
    # Skip call 0 (the page document); section documents start at call 1.
    for i, doc in enumerate(expected_section_docs, start=1):
        key = keys.document(site.url, doc['doc_id'])
        call = indexer.search_client.redis.hset.call_args_list[i]
        assert call[0][0] == key
        assert doc == call[1]['mapping']
| 118.237548
| 4,869
| 0.628483
| 4,228
| 30,860
| 4.533822
| 0.120151
| 0.009181
| 0.016068
| 0.022536
| 0.793573
| 0.775262
| 0.766446
| 0.755856
| 0.752726
| 0.723199
| 0
| 0.039415
| 0.308587
| 30,860
| 260
| 4,870
| 118.692308
| 0.858977
| 0.035677
| 0
| 0.530612
| 0
| 0.040816
| 0.830056
| 0.021623
| 0
| 0
| 0
| 0
| 0.040816
| 1
| 0.066327
| false
| 0
| 0.045918
| 0
| 0.132653
| 0.005102
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
098efa709354856031c3553811ef2fa3606c868e
| 19,428
|
py
|
Python
|
tests/model/blockchain/test_PersonalInfo.py
|
BoostryJP/ibet-Prime
|
924e7f8da4f8feea0a572e8b5532e09bcdf2dc99
|
[
"Apache-2.0"
] | 2
|
2021-08-19T12:35:25.000Z
|
2022-02-16T04:13:38.000Z
|
tests/model/blockchain/test_PersonalInfo.py
|
BoostryJP/ibet-Prime
|
924e7f8da4f8feea0a572e8b5532e09bcdf2dc99
|
[
"Apache-2.0"
] | 46
|
2021-09-02T03:22:05.000Z
|
2022-03-31T09:20:00.000Z
|
tests/model/blockchain/test_PersonalInfo.py
|
BoostryJP/ibet-Prime
|
924e7f8da4f8feea0a572e8b5532e09bcdf2dc99
|
[
"Apache-2.0"
] | 1
|
2021-11-17T23:18:27.000Z
|
2021-11-17T23:18:27.000Z
|
"""
Copyright BOOSTRY Co., Ltd.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
SPDX-License-Identifier: Apache-2.0
"""
import pytest
import base64
import json
from Crypto.Cipher import PKCS1_OAEP
from Crypto.PublicKey import RSA
from eth_keyfile import decode_keyfile_json
from web3 import Web3
from web3.middleware import geth_poa_middleware
from web3.exceptions import TimeExhausted
from unittest.mock import MagicMock
from unittest import mock
from config import WEB3_HTTP_PROVIDER, TX_GAS_LIMIT, CHAIN_ID
from app.model.blockchain import PersonalInfoContract
from app.utils.contract_utils import ContractUtils
from app.exceptions import SendTransactionError
from app.model.db import Account
from app.utils.e2ee_utils import E2EEUtils
from tests.account_config import config_eth_account
# Module-level Web3 client used by every test; connects over HTTP to the
# provider configured in config.WEB3_HTTP_PROVIDER.
web3 = Web3(Web3.HTTPProvider(WEB3_HTTP_PROVIDER))
# Inject the geth proof-of-authority middleware at the innermost layer so
# blocks from a PoA dev chain can be decoded.
web3.middleware_onion.inject(geth_poa_middleware, layer=0)
def initialize(issuer, db):
    """Persist an issuer Account, deploy a PersonalInfo contract, wrap it.

    Stores the issuer's keyfile and RSA key pair (both the EOA password and
    the RSA passphrase are the literal string "password", E2EE-encrypted),
    deploys a fresh ``PersonalInfo`` contract from the issuer address, and
    returns a ``PersonalInfoContract`` bound to that deployment.
    """
    password = "password"  # shared by EOA keyfile and RSA passphrase
    account = Account()
    account.issuer_address = issuer["address"]
    account.keyfile = issuer["keyfile_json"]
    account.eoa_password = E2EEUtils.encrypt(password)
    account.rsa_private_key = issuer["rsa_private_key"]
    account.rsa_public_key = issuer["rsa_public_key"]
    account.rsa_passphrase = E2EEUtils.encrypt(password)
    db.add(account)
    db.commit()
    # Recover the issuer's raw private key to sign the deployment tx.
    eoa_key = decode_keyfile_json(
        raw_keyfile_json=issuer["keyfile_json"],
        password=password.encode("utf-8")
    )
    address, _, _ = ContractUtils.deploy_contract(
        "PersonalInfo", [], issuer["address"], eoa_key
    )
    return PersonalInfoContract(db, issuer["address"], address)
class TestGetInfo:
    """Tests for PersonalInfoContract.get_info.

    ``get_info`` reads the ciphertext registered on-chain for an account and
    decrypts it with the issuer's RSA private key; when decryption is not
    possible it falls back to ``default_value`` for every field.
    """

    ###########################################################################
    # Normal Case
    ###########################################################################

    # <Normal_1>
    def test_normal_1(self, db):
        """Registered RSA-encrypted personal data is decrypted and returned."""
        issuer = config_eth_account("user1")
        personal_info_contract = initialize(issuer, db)
        # Set personal information data
        setting_user = config_eth_account("user2")
        rsa_password = "password"
        # Encrypt with the issuer's RSA public key (PKCS1-OAEP); only the
        # issuer's private key can decrypt what is stored on-chain.
        rsa = RSA.importKey(personal_info_contract.issuer.rsa_public_key, passphrase=rsa_password)
        cipher = PKCS1_OAEP.new(rsa)
        data = {
            "key_manager": "1234567890",
            "name": "name_test1",
            "postal_code": "1001000",
            "address": "テスト住所",
            "email": "sample@test.test",
            "birth": "19801231"
        }
        ciphertext = base64.encodebytes(cipher.encrypt(json.dumps(data).encode('utf-8')))
        contract = personal_info_contract.personal_info_contract
        # Register the ciphertext from the setting user's address.
        tx = contract.functions.register(issuer["address"], ciphertext).buildTransaction({
            "nonce": web3.eth.getTransactionCount(setting_user["address"]),
            "from": setting_user["address"],
            "gas": TX_GAS_LIMIT,
            "gasPrice": 0,
            "chainId": CHAIN_ID
        })
        eoa_password = "password"
        private_key = decode_keyfile_json(
            raw_keyfile_json=setting_user["keyfile_json"],
            password=eoa_password.encode("utf-8")
        )
        ContractUtils.send_transaction(tx, private_key)
        # Run Test
        get_info = personal_info_contract.get_info(setting_user["address"])
        assert get_info == data

    # <Normal_2>
    # Unset Information
    def test_normal_2(self, db):
        """When an empty payload was registered, every field falls back to
        ``default_value``."""
        issuer = config_eth_account("user1")
        personal_info_contract = initialize(issuer, db)
        # Set personal information data
        setting_user = config_eth_account("user2")
        contract = personal_info_contract.personal_info_contract
        # Register an empty string instead of a ciphertext.
        tx = contract.functions.register(issuer["address"], "").buildTransaction({
            "nonce": web3.eth.getTransactionCount(setting_user["address"]),
            "from": setting_user["address"],
            "gas": TX_GAS_LIMIT,
            "gasPrice": 0,
            "chainId": CHAIN_ID
        })
        eoa_password = "password"
        private_key = decode_keyfile_json(
            raw_keyfile_json=setting_user["keyfile_json"],
            password=eoa_password.encode("utf-8")
        )
        ContractUtils.send_transaction(tx, private_key)
        # Run Test
        get_info = personal_info_contract.get_info(setting_user["address"], default_value="test")
        assert get_info == {
            "key_manager": "test",
            "name": "test",
            "postal_code": "test",
            "address": "test",
            "email": "test",
            "birth": "test"
        }

    ###########################################################################
    # Error Case
    ###########################################################################

    # <Error_1>
    # Invalid RSA Private Key
    def test_error_1(self, db):
        """If the issuer's RSA private key is unusable, get_info degrades to
        the default values instead of raising."""
        issuer = config_eth_account("user1")
        personal_info_contract = initialize(issuer, db)
        # Set personal information data
        setting_user = config_eth_account("user2")
        rsa_password = "password"
        rsa = RSA.importKey(personal_info_contract.issuer.rsa_public_key, passphrase=rsa_password)
        cipher = PKCS1_OAEP.new(rsa)
        data = {
            "key_manager": "1234567890",
            "name": "name_test1",
            "postal_code": "1001000",
            "address": "テスト住所",
            "email": "sample@test.test",
            "birth": "19801231"
        }
        ciphertext = base64.encodebytes(cipher.encrypt(json.dumps(data).encode('utf-8')))
        contract = personal_info_contract.personal_info_contract
        tx = contract.functions.register(issuer["address"], ciphertext).buildTransaction({
            "nonce": web3.eth.getTransactionCount(setting_user["address"]),
            "from": setting_user["address"],
            "gas": TX_GAS_LIMIT,
            "gasPrice": 0,
            "chainId": CHAIN_ID
        })
        eoa_password = "password"
        private_key = decode_keyfile_json(
            raw_keyfile_json=setting_user["keyfile_json"],
            password=eoa_password.encode("utf-8")
        )
        ContractUtils.send_transaction(tx, private_key)
        # Invalid RSA Private Key
        personal_info_contract.issuer.rsa_private_key = "testtest"
        # Run Test
        get_info = personal_info_contract.get_info(setting_user["address"], default_value="test")
        assert get_info == {
            "key_manager": "test",
            "name": "test",
            "postal_code": "test",
            "address": "test",
            "email": "test",
            "birth": "test"
        }

    # <Error_2>
    # Decrypt Fail
    def test_error_2(self, db):
        """If the registered payload is not valid ciphertext, get_info
        degrades to the default values instead of raising."""
        issuer = config_eth_account("user1")
        personal_info_contract = initialize(issuer, db)
        # Set personal information data
        setting_user = config_eth_account("user2")
        contract = personal_info_contract.personal_info_contract
        # Register garbage that cannot be base64-decoded/decrypted.
        tx = contract.functions.register(issuer["address"], "testtest").buildTransaction({
            "nonce": web3.eth.getTransactionCount(setting_user["address"]),
            "from": setting_user["address"],
            "gas": TX_GAS_LIMIT,
            "gasPrice": 0,
            "chainId": CHAIN_ID
        })
        eoa_password = "password"
        private_key = decode_keyfile_json(
            raw_keyfile_json=setting_user["keyfile_json"],
            password=eoa_password.encode("utf-8")
        )
        ContractUtils.send_transaction(tx, private_key)
        # Run Test
        get_info = personal_info_contract.get_info(setting_user["address"], default_value="test")
        assert get_info == {
            "key_manager": "test",
            "name": "test",
            "postal_code": "test",
            "address": "test",
            "email": "test",
            "birth": "test"
        }
class TestModifyInfo:
    """Tests for PersonalInfoContract.modify_info (re-encrypt and update the
    personal data registered for an account)."""

    ###########################################################################
    # Normal Case
    ###########################################################################

    # <Normal_1>
    def test_normal_1(self, db):
        """modify_info replaces the registered data; get_info then returns
        the updated payload."""
        issuer = config_eth_account("user1")
        personal_info_contract = initialize(issuer, db)
        # Set personal information data
        setting_user = config_eth_account("user2")
        rsa_password = "password"
        rsa = RSA.importKey(personal_info_contract.issuer.rsa_public_key, passphrase=rsa_password)
        cipher = PKCS1_OAEP.new(rsa)
        data = {
            "key_manager": "1234567890",
            "name": "name_test1",
            "postal_code": "1001000",
            "address": "テスト住所",
            "email": "sample@test.test",
            "birth": "19801231"
        }
        ciphertext = base64.encodebytes(cipher.encrypt(json.dumps(data).encode('utf-8')))
        contract = personal_info_contract.personal_info_contract
        tx = contract.functions.register(issuer["address"], ciphertext).buildTransaction({
            "nonce": web3.eth.getTransactionCount(setting_user["address"]),
            "from": setting_user["address"],
            "gas": TX_GAS_LIMIT,
            "gasPrice": 0,
            "chainId": CHAIN_ID
        })
        eoa_password = "password"
        private_key = decode_keyfile_json(
            raw_keyfile_json=setting_user["keyfile_json"],
            password=eoa_password.encode("utf-8")
        )
        ContractUtils.send_transaction(tx, private_key)
        # Run Test
        update_data = {
            "key_manager": "0987654321",
            "name": "name_test2",
            "postal_code": "2002000",
            "address": "テスト住所2",
            "email": "sample@test.test2",
            "birth": "19800101"
        }
        personal_info_contract.modify_info(setting_user["address"], update_data)
        # Round-trip: the stored data must now decrypt to the update.
        get_info = personal_info_contract.get_info(setting_user["address"])
        assert get_info == update_data

    ###########################################################################
    # Error Case
    ###########################################################################

    # <Error_1>
    # SendTransactionError(Timeout)
    def test_error_1(self, db):
        """A receipt-wait timeout is surfaced as SendTransactionError."""
        issuer = config_eth_account("user1")
        personal_info_contract = initialize(issuer, db)
        # Set personal information data
        setting_user = config_eth_account("user2")
        rsa_password = "password"
        rsa = RSA.importKey(personal_info_contract.issuer.rsa_public_key, passphrase=rsa_password)
        cipher = PKCS1_OAEP.new(rsa)
        data = {
            "key_manager": "1234567890",
            "name": "name_test1",
            "postal_code": "1001000",
            "address": "テスト住所",
            "email": "sample@test.test",
            "birth": "19801231"
        }
        ciphertext = base64.encodebytes(cipher.encrypt(json.dumps(data).encode('utf-8')))
        contract = personal_info_contract.personal_info_contract
        tx = contract.functions.register(issuer["address"], ciphertext).buildTransaction({
            "nonce": web3.eth.getTransactionCount(setting_user["address"]),
            "from": setting_user["address"],
            "gas": TX_GAS_LIMIT,
            "gasPrice": 0,
            "chainId": CHAIN_ID
        })
        eoa_password = "password"
        private_key = decode_keyfile_json(
            raw_keyfile_json=setting_user["keyfile_json"],
            password=eoa_password.encode("utf-8")
        )
        ContractUtils.send_transaction(tx, private_key)
        # Run Test
        update_data = {
            "key_manager": "0987654321",
            "name": "name_test2",
            "postal_code": "2002000",
            "address": "テスト住所2",
            "email": "sample@test.test2",
            "birth": "19800101"
        }
        # Force the receipt wait to time out.
        with mock.patch("web3.eth.Eth.waitForTransactionReceipt", MagicMock(side_effect=TimeExhausted())):
            with pytest.raises(SendTransactionError):
                personal_info_contract.modify_info(setting_user["address"], update_data)

    # <Error_2>
    # SendTransactionError(Other Error)
    def test_error_2(self, db):
        """Any other failure while waiting for the receipt is also wrapped in
        SendTransactionError."""
        issuer = config_eth_account("user1")
        personal_info_contract = initialize(issuer, db)
        # Set personal information data
        setting_user = config_eth_account("user2")
        rsa_password = "password"
        rsa = RSA.importKey(personal_info_contract.issuer.rsa_public_key, passphrase=rsa_password)
        cipher = PKCS1_OAEP.new(rsa)
        data = {
            "key_manager": "1234567890",
            "name": "name_test1",
            "postal_code": "1001000",
            "address": "テスト住所",
            "email": "sample@test.test",
            "birth": "19801231"
        }
        ciphertext = base64.encodebytes(cipher.encrypt(json.dumps(data).encode('utf-8')))
        contract = personal_info_contract.personal_info_contract
        tx = contract.functions.register(issuer["address"], ciphertext).buildTransaction({
            "nonce": web3.eth.getTransactionCount(setting_user["address"]),
            "from": setting_user["address"],
            "gas": TX_GAS_LIMIT,
            "gasPrice": 0,
            "chainId": CHAIN_ID
        })
        eoa_password = "password"
        private_key = decode_keyfile_json(
            raw_keyfile_json=setting_user["keyfile_json"],
            password=eoa_password.encode("utf-8")
        )
        ContractUtils.send_transaction(tx, private_key)
        # Run Test
        update_data = {
            "key_manager": "0987654321",
            "name": "name_test2",
            "postal_code": "2002000",
            "address": "テスト住所2",
            "email": "sample@test.test2",
            "birth": "19800101"
        }
        # Simulate an unexpected error from the receipt wait.
        with mock.patch("web3.eth.Eth.waitForTransactionReceipt", MagicMock(side_effect=TypeError())):
            with pytest.raises(SendTransactionError):
                personal_info_contract.modify_info(setting_user["address"], update_data)
class TestGetRegisterEvent:
    """Tests for PersonalInfoContract.get_register_event (Register event log
    retrieval over a block range)."""

    ###########################################################################
    # Normal Case
    ###########################################################################

    # <Normal_1>
    def test_normal_1(self, db):
        """A register transaction emits an event whose args carry the
        registering account and the linked issuer address."""
        issuer = config_eth_account("user1")
        personal_info_contract = initialize(issuer, db)
        # Capture the block range before the register tx.
        block_number_before = web3.eth.blockNumber
        # Set personal information data(Register)
        setting_user = config_eth_account("user2")
        rsa_password = "password"
        rsa = RSA.importKey(personal_info_contract.issuer.rsa_public_key, passphrase=rsa_password)
        cipher = PKCS1_OAEP.new(rsa)
        data = {
            "key_manager": "1234567890",
            "name": "name_test1",
            "postal_code": "1001000",
            "address": "テスト住所",
            "email": "sample@test.test",
            "birth": "19801231"
        }
        ciphertext = base64.encodebytes(cipher.encrypt(json.dumps(data).encode('utf-8')))
        contract = personal_info_contract.personal_info_contract
        tx = contract.functions.register(issuer["address"], ciphertext).buildTransaction({
            "nonce": web3.eth.getTransactionCount(setting_user["address"]),
            "from": setting_user["address"],
            "gas": TX_GAS_LIMIT,
            "gasPrice": 0,
            "chainId": CHAIN_ID
        })
        eoa_password = "password"
        private_key = decode_keyfile_json(
            raw_keyfile_json=setting_user["keyfile_json"],
            password=eoa_password.encode("utf-8")
        )
        ContractUtils.send_transaction(tx, private_key)
        block_number_after = web3.eth.blockNumber
        # Fetch Register events emitted within [before, after].
        events = personal_info_contract.get_register_event(block_number_before, block_number_after)
        args = events[0]["args"]
        assert args["account_address"] == setting_user["address"]
        assert args["link_address"] == issuer["address"]
class TestGetModifyEvent:
    """Tests for PersonalInfoContract.get_modify_event (Modify event log
    retrieval over a block range)."""

    ###########################################################################
    # Normal Case
    ###########################################################################

    # <Normal_1>
    def test_normal_1(self, db):
        """A modify transaction from the issuer emits an event whose args
        carry the modified account and the issuer address."""
        issuer = config_eth_account("user1")
        personal_info_contract = initialize(issuer, db)
        # Set personal information data
        setting_user = config_eth_account("user2")
        rsa_password = "password"
        rsa = RSA.importKey(personal_info_contract.issuer.rsa_public_key, passphrase=rsa_password)
        cipher = PKCS1_OAEP.new(rsa)
        data = {
            "key_manager": "1234567890",
            "name": "name_test1",
            "postal_code": "1001000",
            "address": "テスト住所",
            "email": "sample@test.test",
            "birth": "19801231"
        }
        ciphertext = base64.encodebytes(cipher.encrypt(json.dumps(data).encode('utf-8')))
        contract = personal_info_contract.personal_info_contract
        tx = contract.functions.register(issuer["address"], ciphertext).buildTransaction({
            "nonce": web3.eth.getTransactionCount(setting_user["address"]),
            "from": setting_user["address"],
            "gas": TX_GAS_LIMIT,
            "gasPrice": 0,
            "chainId": CHAIN_ID
        })
        eoa_password = "password"
        private_key = decode_keyfile_json(
            raw_keyfile_json=setting_user["keyfile_json"],
            password=eoa_password.encode("utf-8")
        )
        ContractUtils.send_transaction(tx, private_key)
        # Range starts after the register tx so only the modify event lands
        # inside [before, after].
        block_number_before = web3.eth.blockNumber
        # Modify
        update_data = {
            "key_manager": "0987654321",
            "name": "name_test2",
            "postal_code": "2002000",
            "address": "テスト住所2",
            "email": "sample@test.test2",
            "birth": "19800101"
        }
        ciphertext = base64.encodebytes(cipher.encrypt(json.dumps(update_data).encode('utf-8')))
        contract = personal_info_contract.personal_info_contract
        # The issuer (not the setting user) sends the modify tx.
        tx = contract.functions.modify(setting_user["address"], ciphertext).buildTransaction({
            "nonce": web3.eth.getTransactionCount(issuer["address"]),
            "from": issuer["address"],
            "gas": TX_GAS_LIMIT,
            "gasPrice": 0,
            "chainId": CHAIN_ID
        })
        private_key = decode_keyfile_json(
            raw_keyfile_json=issuer["keyfile_json"],
            password=eoa_password.encode("utf-8")
        )
        ContractUtils.send_transaction(tx, private_key)
        block_number_after = web3.eth.blockNumber
        events = personal_info_contract.get_modify_event(block_number_before, block_number_after)
        args = events[0]["args"]
        assert args["account_address"] == setting_user["address"]
        assert args["link_address"] == issuer["address"]
| 37.433526
| 110
| 0.591929
| 1,945
| 19,428
| 5.641645
| 0.108997
| 0.053586
| 0.08931
| 0.051034
| 0.817461
| 0.811173
| 0.805796
| 0.795498
| 0.795498
| 0.795498
| 0
| 0.029262
| 0.254169
| 19,428
| 518
| 111
| 37.505792
| 0.728019
| 0.064083
| 0
| 0.803109
| 0
| 0
| 0.148683
| 0.004409
| 0
| 0
| 0
| 0
| 0.023316
| 1
| 0.025907
| false
| 0.098446
| 0.064767
| 0
| 0.103627
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
0996a4b6fd5e3c01b993e622e0bf40c115da27b3
| 365
|
py
|
Python
|
tests/RunTests/PythonTests/test2011_024.py
|
maurizioabba/rose
|
7597292cf14da292bdb9a4ef573001b6c5b9b6c0
|
[
"BSD-3-Clause"
] | 488
|
2015-01-09T08:54:48.000Z
|
2022-03-30T07:15:46.000Z
|
tests/RunTests/PythonTests/test2011_024.py
|
sujankh/rose-matlab
|
7435d4fa1941826c784ba97296c0ec55fa7d7c7e
|
[
"BSD-3-Clause"
] | 174
|
2015-01-28T18:41:32.000Z
|
2022-03-31T16:51:05.000Z
|
tests/RunTests/PythonTests/test2011_024.py
|
sujankh/rose-matlab
|
7435d4fa1941826c784ba97296c0ec55fa7d7c7e
|
[
"BSD-3-Clause"
] | 146
|
2015-04-27T02:48:34.000Z
|
2022-03-04T07:32:53.000Z
|
# test precedence
# Python 2 compiler test: exercises arithmetic operator precedence
# (** binds tightest, then *, then binary + and -) under different
# parenthesizations of the same expression. Each line prints a value
# that depends on how the grouping overrides the default precedence,
# so the statements below must not be altered.
print 1 + 2 * 3 - 4 ** 5
print (1 + 2) * 3 - 4 ** 5
print (1 + 2 * 3) - 4 ** 5
print (1 + 2 * 3 - 4) ** 5
print (1 + 2 * 3 - 4 ** 5)
print (1 + 2) * (3 - 4) ** 5
print (1 + 2 * 3) - (4 ** 5)
print 1 + (2 * 3) - (4 ** 5)
print ((1 + 2) * 3 - 4) ** 5
print (((1 + 2) * 3) - 4) ** 5
print ((1 + 2) * 3) - 4 ** 5
# Right-associativity of **: 4 ** (2 ** 2) ** 2 groups as 4 ** ((2 ** 2) ** 2)
print 1 + 2 * 3 - 4 ** (2 ** 2) ** 2
| 24.333333
| 36
| 0.372603
| 76
| 365
| 1.789474
| 0.105263
| 0.529412
| 0.617647
| 0.705882
| 0.875
| 0.875
| 0.875
| 0.875
| 0.875
| 0.875
| 0
| 0.261603
| 0.350685
| 365
| 14
| 37
| 26.071429
| 0.312236
| 0.041096
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 14
|
0998cf5d6ed8fe8ae28cc2ed3854e21037b2efd1
| 15,291
|
py
|
Python
|
pqviz/create_dataframes.py
|
mitre/PQViz
|
229e662c408e0532df44585d134b8e79eb6c4cf8
|
[
"Apache-2.0"
] | null | null | null |
pqviz/create_dataframes.py
|
mitre/PQViz
|
229e662c408e0532df44585d134b8e79eb6c4cf8
|
[
"Apache-2.0"
] | null | null | null |
pqviz/create_dataframes.py
|
mitre/PQViz
|
229e662c408e0532df44585d134b8e79eb6c4cf8
|
[
"Apache-2.0"
] | 1
|
2022-01-18T21:00:39.000Z
|
2022-01-18T21:00:39.000Z
|
from pathlib import Path
from ipywidgets import interact, interactive, fixed, interact_manual
import ipywidgets as widgets
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
import us
def create_prevalence_df(file_path, population_group):
    """Create a long-format DataFrame of prevalences plus demographic data.

    Parameters:
        file_path: A pathlib folder with pq outputs to compare
        population_group: Type of population, expected inputs ['Pediatric', 'Adult']

    Returns:
        A DataFrame where the rows are distinct demographic and prevalence
        numbers, one row per (demographics, prevalence type) pair, with the
        matching standard error merged in.
    """
    # "Order" row that carries each demographic field in a pq output csv;
    # the Adult reports shift race/location/year down by one row.
    orders_by_group = {
        "Pediatric": {"sex": 6, "race": 7, "location": 10, "age": 5, "year": 11},
        "Adult": {"sex": 6, "race": 8, "location": 11, "age": 5, "year": 12},
    }

    def _extract(df, order, pattern=r"\(([^)]+)\)"):
        # First captured group from the "Weight Category" cell of the row
        # whose "Order" equals `order` (default: the text in parentheses).
        return (
            df[df["Order"] == order]["Weight Category"]
            .str.extract(pattern, expand=True)
            .reset_index()
            .at[0, 0]
        )

    def _annotate(filename, orders):
        # Read one pq csv and attach its demographic metadata as columns.
        # Returns None (after logging) when the file is unreadable or lacks
        # the expected metadata rows.
        print(f"Reading (unknown)")
        try:
            df = pd.read_csv(filename, index_col=None, header=0)
            _extract(df, orders["sex"])  # probe: raises if metadata rows absent
        except Exception:
            print(f"File (unknown) has no data, skipping")
            return None
        df["sex"] = _extract(df, orders["sex"])
        df["race"] = _extract(df, orders["race"])
        location_code = _extract(df, orders["location"])
        # A bare 2-character code is a state; otherwise a comma-separated
        # list of <state><zcta3> codes (e.g. "MA021, MA022").
        if len(location_code) == 2:
            state_cd = location_code
            df["zcta3"] = np.nan
        else:
            zcta3 = []
            states = []
            for loc in [l.strip() for l in location_code.split(",")]:
                zcta3.append(loc[2:])
                states.append(loc[:2])
            df["zcta3"] = ",".join(zcta3)
            state_cd = ",".join(list(set(states)))
        df["state"] = us.states.lookup(state_cd)
        df["age"] = _extract(df, orders["age"])
        df["filename"] = filename
        # The year row uses "label: value" formatting rather than parentheses.
        df["year"] = _extract(df, orders["year"], pattern=":(.*)")
        return df

    # create a list of all the csvs in path
    all_files = list(file_path.glob("**/*"))
    all_df = []
    if population_group in orders_by_group:
        for filename in all_files:
            df = _annotate(filename, orders_by_group[population_group])
            if df is not None:
                all_df.append(df)
    all_df = pd.concat(all_df, axis=0, ignore_index=True, sort=True)
    # Order == 1 rows hold the actual prevalence figures.
    all_data = all_df[all_df["Order"] == 1].drop(columns="Order")
    id_cols = [
        "Weight Category",
        "sex",
        "race",
        "state",
        "zcta3",
        "age",
        "filename",
        "year",
    ]
    # Split point estimates and standard errors, then melt each to long form.
    std_data = all_data.drop(
        columns=[
            "Crude Prevalence",
            "Weighted Prevalence",
            "Age-Adjusted Prevalence",
            "Sample",
            "Population",
        ]
    )
    prev_data = all_data.drop(
        columns=[
            "Crude Prevalence Standard Error",
            "Weighted Prevalence Standard Error",
            "Age-Adjusted Prevalence Standard Error",
            "Sample",
            "Population",
        ]
    )
    prev_data_melt = prev_data.melt(
        id_vars=id_cols,
        value_name="Prevalence",
        var_name="Prevalence type",
    )
    std_melt = std_data.melt(
        id_vars=id_cols,
        value_name="Standard Error",
        var_name="Prevalence type",
    )
    # Reduce "Crude Prevalence" / "Crude Prevalence Standard Error" etc. to
    # their shared first word ("Crude", ...) so the two frames join cleanly.
    prev_data_melt["Prevalence type"] = prev_data_melt["Prevalence type"].str.split(
        expand=True
    )[0]
    std_melt["Prevalence type"] = std_melt["Prevalence type"].str.split(expand=True)[0]
    output_name = prev_data_melt.merge(
        std_melt,
        on=id_cols + ["Prevalence type"],
        how="left",
    )
    # pq encodes suppressed cells as "."; map them to NaN.  np.nan replaces
    # the np.NAN alias, which was removed in NumPy 2.0.
    output_name["Prevalence"] = output_name["Prevalence"].replace({".": np.nan})
    output_name["Standard Error"] = output_name["Standard Error"].replace({".": np.nan})
    return output_name
def create_population_df(file_path, population_group):
    """Create a long-format DataFrame of population counts plus demographics.

    Population numbers come from the American Community Survey.

    Parameters:
        file_path: A pathlib folder with pq outputs to compare
        population_group: Type of population, expected inputs ['Pediatric', 'Adult']

    Returns:
        A DataFrame where the rows are distinct demographic and population
        numbers, one row per (demographics, population type) pair.
    """
    # "Order" row that carries each demographic field in a pq output csv;
    # the Adult reports shift race/location/year down by one row.
    orders_by_group = {
        "Pediatric": {"sex": 6, "race": 7, "location": 10, "age": 5, "year": 11},
        "Adult": {"sex": 6, "race": 8, "location": 11, "age": 5, "year": 12},
    }

    def _extract(df, order, pattern=r"\(([^)]+)\)"):
        # First captured group from the "Weight Category" cell of the row
        # whose "Order" equals `order` (default: the text in parentheses).
        return (
            df[df["Order"] == order]["Weight Category"]
            .str.extract(pattern, expand=True)
            .reset_index()
            .at[0, 0]
        )

    def _annotate(filename, orders):
        # Read one pq csv and attach its demographic metadata as columns.
        # Returns None (after logging) when the file is unreadable or lacks
        # the expected metadata rows.
        print(f"Reading (unknown)")
        try:
            df = pd.read_csv(filename, index_col=None, header=0)
            _extract(df, orders["sex"])  # probe: raises if metadata rows absent
        except Exception:
            print(f"File (unknown) has no data, skipping")
            return None
        df["sex"] = _extract(df, orders["sex"])
        df["race"] = _extract(df, orders["race"])
        location_code = _extract(df, orders["location"])
        # A bare 2-character code is a state; otherwise a comma-separated
        # list of <state><zcta3> codes (e.g. "MA021, MA022").
        if len(location_code) == 2:
            state_cd = location_code
            df["zcta3"] = np.nan
        else:
            zcta3 = []
            states = []
            for loc in [l.strip() for l in location_code.split(",")]:
                zcta3.append(loc[2:])
                states.append(loc[:2])
            df["zcta3"] = ",".join(zcta3)
            state_cd = ",".join(list(set(states)))
        df["state"] = us.states.lookup(state_cd)
        df["age"] = _extract(df, orders["age"])
        df["filename"] = filename
        # The year row uses "label: value" formatting rather than parentheses.
        df["year"] = _extract(df, orders["year"], pattern=":(.*)")
        return df

    # create a list of all the csvs in path
    all_files = list(file_path.glob("**/*"))
    all_df = []
    if population_group in orders_by_group:
        for filename in all_files:
            df = _annotate(filename, orders_by_group[population_group])
            if df is not None:
                all_df.append(df)
    all_df = pd.concat(all_df, axis=0, ignore_index=True, sort=True)
    # Order == 1 rows hold the actual figures; keep only population columns.
    all_data = all_df[all_df["Order"] == 1].drop(columns="Order")
    pop_data = all_data.drop(
        columns=[
            "Crude Prevalence",
            "Weighted Prevalence",
            "Age-Adjusted Prevalence",
            "Crude Prevalence Standard Error",
            "Weighted Prevalence Standard Error",
            "Age-Adjusted Prevalence Standard Error",
        ]
    )
    output_name = pop_data.melt(
        id_vars=[
            "Weight Category",
            "sex",
            "race",
            "state",
            "zcta3",
            "age",
            "filename",
            "year",
        ],
        value_name="Population",
        var_name="Population type",
    )
    # pq encodes suppressed cells as "."; map them to NaN.  np.nan replaces
    # the np.NAN alias, which was removed in NumPy 2.0.
    output_name["Population"] = output_name["Population"].replace({".": np.nan})
    # Strip thousands separators before converting counts to float.
    output_name["Population"] = (
        output_name["Population"].astype(str).str.replace(",", "").astype(float)
    )
    return output_name
| 32.259494
| 90
| 0.444444
| 1,515
| 15,291
| 4.390099
| 0.113531
| 0.058939
| 0.032476
| 0.086604
| 0.885431
| 0.880319
| 0.86829
| 0.857766
| 0.84664
| 0.839723
| 0
| 0.014294
| 0.423517
| 15,291
| 473
| 91
| 32.327696
| 0.740216
| 0.125891
| 0
| 0.814016
| 0
| 0
| 0.146864
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.005391
| false
| 0
| 0.021563
| 0
| 0.032345
| 0.021563
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
09d8e8ba8a776f1863aedf907dc9898feca39d04
| 5,886
|
py
|
Python
|
SystemComponent/ProgressBar.py
|
leejaymin/QDroid
|
13ff9d26932378513a7c9f0038eb59b922ed06eb
|
[
"Apache-2.0"
] | null | null | null |
SystemComponent/ProgressBar.py
|
leejaymin/QDroid
|
13ff9d26932378513a7c9f0038eb59b922ed06eb
|
[
"Apache-2.0"
] | null | null | null |
SystemComponent/ProgressBar.py
|
leejaymin/QDroid
|
13ff9d26932378513a7c9f0038eb59b922ed06eb
|
[
"Apache-2.0"
] | null | null | null |
#! python2.7
## -*- coding: utf-8 -*-
#===============================================================================
# @author: kun
#===============================================================================
import os,sys
# Make both the working directory and its parent importable so sibling
# packages (e.g. ViewManagement below) resolve when this module is run
# directly rather than through an installed package.
current_path = os.getcwd()
parent_path = os.path.abspath(os.path.join(os.getcwd(), os.path.pardir))
if current_path not in sys.path:
    sys.path.append(current_path)
if parent_path not in sys.path:
    sys.path.append(parent_path)
from ViewManagement import ParseElement
class ProgressBar():
    '''
    ProgressBar

    Reads progress values of android.widget.ProgressBar nodes out of a
    parsed UI view tree.  The four public lookup methods previously
    duplicated the same extraction logic; it now lives in
    _percent_of_node().
    '''
    def __init__(self, tree_nodes_list):
        # Flat list of view-tree nodes this helper searches.
        self.tree_nodes_list = tree_nodes_list
        self.ProgressBar_ClassName = "android.widget.ProgressBar"

    def _percent_of_node(self, node):
        '''
        @return: percent value in (0, 100] for one ProgressBar node, or
        None when the bar is indeterminate or neither progress value maps
        to a valid percentage.  The primary progress is tried first, the
        secondary progress as a fallback.
        '''
        element_parser = ParseElement.ParseElement(node.mElement)
        element_parser.parseElmentData()
        # Indeterminate bars expose no meaningful progress value.
        if element_parser.getBoolean(element_parser.properties_dict["progress:isIndeterminate()"], True):
            return None
        max_value = element_parser.getInt(element_parser.properties_dict["progress:getMax()"], 100)
        current_value = element_parser.getInt(element_parser.properties_dict["progress:getProgress()"], 0)
        second_value = element_parser.getInt(element_parser.properties_dict["progress:getSecondaryProgress()"], 0)
        for value in (current_value, second_value):
            percent = float(value)/float(max_value) * 100
            if percent>0 and percent<=100:
                return percent
        return None

    def getCurrentProgress(self):
        '''
        @return: percent value of the first ProgressBar node with a valid
        progress, or None
        '''
        for node in self.tree_nodes_list:
            if node.mClassName==self.ProgressBar_ClassName:
                percent = self._percent_of_node(node)
                if percent is not None:
                    return percent
        return None

    def getProgressById(self, id):
        '''
        @return: percent value of the first matching ProgressBar whose
        resource id equals "id/" + id, or None
        '''
        if 0==len(id):
            return None
        real_id = "id/"+id
        for node in self.tree_nodes_list:
            if (node.mClassName==self.ProgressBar_ClassName) and (real_id==node.mId):
                percent = self._percent_of_node(node)
                if percent is not None:
                    return percent
        return None

    def getProgressByText(self, text):
        '''
        @return: percent value of the first matching ProgressBar whose text
        equals the given text, or None
        '''
        if 0==len(text):
            return None
        for node in self.tree_nodes_list:
            if (node.mClassName==self.ProgressBar_ClassName) and (node.mText != None) and (text==node.mText):
                percent = self._percent_of_node(node)
                if percent is not None:
                    return percent
        return None

    def getProgressByKeyWord(self, key_word):
        '''
        @return: percent value of the first matching ProgressBar whose text
        contains the key word, or None
        '''
        if 0==len(key_word):
            return None
        for node in self.tree_nodes_list:
            if (node.mClassName==self.ProgressBar_ClassName) and (node.mText != None) and (node.mText.find(key_word)>=0):
                percent = self._percent_of_node(node)
                if percent is not None:
                    return percent
        return None
| 44.590909
| 123
| 0.563541
| 565
| 5,886
| 5.661947
| 0.134513
| 0.162551
| 0.115036
| 0.135042
| 0.833073
| 0.833073
| 0.833073
| 0.833073
| 0.814942
| 0.814942
| 0
| 0.020776
| 0.321271
| 5,886
| 132
| 124
| 44.590909
| 0.779975
| 0.036697
| 0
| 0.724138
| 0
| 0
| 0.076411
| 0.063275
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057471
| false
| 0
| 0.022989
| 0
| 0.264368
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
09e3e62e1f9401e4c9c55ed90251e2f62214d8bb
| 113
|
py
|
Python
|
market/baselines/baselines/bench/__init__.py
|
LuoMaimingS/django_virtual_stock_market
|
cfeccdbb906f9998ec0a0633c2d2f39cdd87bf85
|
[
"BSD-3-Clause"
] | 1
|
2021-05-29T23:33:41.000Z
|
2021-05-29T23:33:41.000Z
|
market/baselines/baselines/bench/__init__.py
|
LuoMaimingS/django_virtual_stock_market
|
cfeccdbb906f9998ec0a0633c2d2f39cdd87bf85
|
[
"BSD-3-Clause"
] | null | null | null |
market/baselines/baselines/bench/__init__.py
|
LuoMaimingS/django_virtual_stock_market
|
cfeccdbb906f9998ec0a0633c2d2f39cdd87bf85
|
[
"BSD-3-Clause"
] | null | null | null |
from market.baselines.baselines.bench.benchmarks import *
from market.baselines.baselines.bench.monitor import *
| 37.666667
| 57
| 0.840708
| 14
| 113
| 6.785714
| 0.5
| 0.210526
| 0.4
| 0.589474
| 0.694737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070796
| 113
| 2
| 58
| 56.5
| 0.904762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
11359ecc01d91369841dc14fe135600f99d6e2c7
| 3,311
|
py
|
Python
|
basenef/mixin/arithematic_ops.py
|
bill52547/NEF
|
d1afc940f3a56569739738f21ba90e118bd5ce8b
|
[
"Apache-2.0"
] | null | null | null |
basenef/mixin/arithematic_ops.py
|
bill52547/NEF
|
d1afc940f3a56569739738f21ba90e118bd5ce8b
|
[
"Apache-2.0"
] | null | null | null |
basenef/mixin/arithematic_ops.py
|
bill52547/NEF
|
d1afc940f3a56569739738f21ba90e118bd5ce8b
|
[
"Apache-2.0"
] | null | null | null |
# encoding: utf-8
'''
@author: Minghao Guo
@contact: mh.guo0111@gmail.com
@software: basenef
@file: arithematic_ops.py
@date: 4/13/2019
@desc:
'''
import operator
import numpy as np
from copy import copy
class UnaryOpMixin:
    """Element-wise unary operations for objects that carry a ``data``
    array and rebuild themselves via ``_replace(data=...)``."""

    def abs(self):
        """Return a new instance holding the absolute values of ``data``."""
        absolute = np.abs(self.data)
        return self._replace(data=absolute)

    def __neg__(self):
        """Return a new instance holding the negated ``data``."""
        negated = -self.data
        return self._replace(data=negated)
class BinaryOpMixin:
    """Element-wise binary operators for objects that carry a ``data``
    ndarray and rebuild themselves via ``_replace(data=...)``.

    Scalars and raw numpy arrays are used directly as the right-hand side;
    any other operand is assumed to be a wrapper and its ``.data`` is used.
    (``operator`` is imported at module level in this file.)
    """

    def _binary(self, op, other):
        # Unwrap wrapper operands; scalars/ndarrays participate directly.
        if np.isscalar(other) or isinstance(other, np.ndarray):
            rhs = other
        else:
            rhs = other.data
        return self._replace(data=op(self.data, rhs))

    def __eq__(self, other):
        # BUG FIX: this previously performed addition (copy-paste from
        # __add__); it now performs the element-wise equality comparison.
        return self._binary(operator.eq, other)

    def __gt__(self, other):
        return self._binary(operator.gt, other)

    def __ge__(self, other):
        return self._binary(operator.ge, other)

    def __lt__(self, other):
        return self._binary(operator.lt, other)

    def __le__(self, other):
        return self._binary(operator.le, other)

    def __add__(self, other):
        return self._binary(operator.add, other)

    def __sub__(self, other):
        return self._binary(operator.sub, other)

    def __mul__(self, other):
        return self._binary(operator.mul, other)

    def __truediv__(self, other):
        # Division additionally maps +inf (from division by zero) to 0.0,
        # mutating the freshly computed array in place.
        if np.isscalar(other) or isinstance(other, np.ndarray):
            new_data = self.data / other
        else:
            new_data = self.data / other.data
        new_data[new_data == np.inf] = 0.0
        return self._replace(data=new_data)

    def __floordiv__(self, other):
        return self._binary(operator.floordiv, other)

    def __mod__(self, other):
        return self._binary(operator.mod, other)

    def __pow__(self, other):
        return self._binary(operator.pow, other)
class ArithematicalOpMixin(BinaryOpMixin, UnaryOpMixin):
    """Aggregate mixin combining the binary and unary arithmetic mixins."""
    pass
| 32.782178
| 64
| 0.615222
| 419
| 3,311
| 4.663484
| 0.150358
| 0.106448
| 0.217503
| 0.26868
| 0.816786
| 0.77738
| 0.762538
| 0.762538
| 0.762538
| 0.762538
| 0
| 0.005797
| 0.270613
| 3,311
| 100
| 65
| 33.11
| 0.803313
| 0.041377
| 0
| 0.383562
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.191781
| false
| 0.013699
| 0.041096
| 0.027397
| 0.616438
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
febc2ad7f300911d8f345a303c08f80a1be48dbd
| 140
|
py
|
Python
|
codewars/8kyu/amrlotfy77/Capitalization and Mutability/main.py
|
ictcubeMENA/Training_one
|
dff6bee96ba42babe4888e5cf9a9448a6fd93fc3
|
[
"MIT"
] | null | null | null |
codewars/8kyu/amrlotfy77/Capitalization and Mutability/main.py
|
ictcubeMENA/Training_one
|
dff6bee96ba42babe4888e5cf9a9448a6fd93fc3
|
[
"MIT"
] | 2
|
2019-01-22T10:53:42.000Z
|
2019-01-31T08:02:48.000Z
|
codewars/8kyu/amrlotfy77/Capitalization and Mutability/main.py
|
ictcubeMENA/Training_one
|
dff6bee96ba42babe4888e5cf9a9448a6fd93fc3
|
[
"MIT"
] | 13
|
2019-01-22T10:37:42.000Z
|
2019-01-25T13:30:43.000Z
|
def capitalizeWord(word):
    """Return *word* with its first character upper-cased.

    Unlike ``str.capitalize`` the rest of the word is left untouched.
    Uses a ``word[:1]`` slice so the empty string is returned unchanged
    instead of raising ``IndexError`` as ``word[0]`` did.
    """
    c = word[:1].upper() + word[1:]
    return c
def capitalizeWord1(word):
    """Return *word* with its first character upper-cased (one-liner form).

    Uses a ``word[:1]`` slice so the empty string is returned unchanged
    instead of raising ``IndexError`` as ``word[0]`` did.
    """
    return word[:1].upper() + word[1:]
| 15.555556
| 37
| 0.621429
| 20
| 140
| 4.35
| 0.45
| 0.114943
| 0.229885
| 0.321839
| 0.344828
| 0
| 0
| 0
| 0
| 0
| 0
| 0.044643
| 0.2
| 140
| 9
| 37
| 15.555556
| 0.732143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
fed83494462fbbecba1da52eba2ca79f4aa66c73
| 6,337
|
py
|
Python
|
operator/src/playbook_utils.py
|
j-griffith/pushbutton-ci
|
e1adc74aeceec2c5a5678b0c556a3147b49ee6dc
|
[
"Apache-2.0"
] | null | null | null |
operator/src/playbook_utils.py
|
j-griffith/pushbutton-ci
|
e1adc74aeceec2c5a5678b0c556a3147b49ee6dc
|
[
"Apache-2.0"
] | null | null | null |
operator/src/playbook_utils.py
|
j-griffith/pushbutton-ci
|
e1adc74aeceec2c5a5678b0c556a3147b49ee6dc
|
[
"Apache-2.0"
] | null | null | null |
import subprocess
def stackit(cloud, server, conf_file, branch='master', cinder_branch='master',
            use_floating_ip=False, results_dir='/tmp'):
    """Install devstack on the specified OpenStack Instance (server).

    Assumes a running Instance; installs devstack based on the provided
    parameters.

    :param cloud (os-cloud-config obj): The cloud you're operating on
    :param server (shade object): Server to install devstack on
    :param conf_file (str): location of local.conf template file
    :param branch (Optional str): devstack branch to use.
        NOTE(review): currently unused by this function — confirm whether
        the playbook should receive it as an extra var.
    :param cinder_branch (Optional str): cinder branch to use, ie patchset
        id (refs/changes/02/291302/2)
    :param use_floating_ip (Optional bool): By default we use the private
        IP of the Instance to communicate with it; set to True if you're
        running from a machine NOT in the cloud and need floating IP access.
    :param results_dir (Optional str): Location to dump log output from
        Ansible, default is /tmp

    Returns:
        (bool, str): True if succesful, False otherwise, and output from
        ansible playbook run.
    """
    host_ip = cloud.get_server_private_ip(server)
    if use_floating_ip:
        host_ip = cloud.get_server_public_ip(server)
    # Build the ansible --extra-vars payload (renamed from `vars` to avoid
    # shadowing the builtin).
    extra_vars = 'hosts=%s,' % host_ip
    extra_vars += ' devstack_conf=%s' % conf_file
    extra_vars += ' results_dir=%s' % results_dir
    extra_vars += ' patchset_ref=%s' % cinder_branch
    cmd = 'ansible-playbook /src/stackbooks/install_devstack.yml --extra-vars '\
          '\"%s\" -i %s,' % (extra_vars, host_ip)
    # SECURITY NOTE: shell=True with an interpolated command string — only
    # trusted CI parameters may reach this function.
    ansible_proc = subprocess.Popen(cmd, shell=True,
                                    stdout=subprocess.PIPE)
    output = ansible_proc.communicate()[0]
    if ansible_proc.returncode == 0:
        return (True, output)
    else:
        return (False, output)
def run_tempest(cloud, server, use_floating_ip=False, results_dir='/tmp'):
    """Run tempest on the specified OpenStack Instance (server).

    Assumes a running devstack Instance with Tempest installed and
    configured.

    :param cloud (os-cloud-config obj): The cloud you're operating on
    :param server (shade object): Server to run tempest on
    :param use_floating_ip (Optional bool): By default we use the private
        IP of the Instance to communicate with it; set to True if you're
        running from a machine NOT in the cloud and need floating IP access.
    :param results_dir (Optional str): Location to dump log output from
        Ansible, default is /tmp

    Returns:
        (bool, str): True if succesful, False otherwise, and output from
        ansible playbook run.
    """
    host_ip = cloud.get_server_private_ip(server)
    if use_floating_ip:
        host_ip = cloud.get_server_public_ip(server)
    # Build the ansible --extra-vars payload (renamed from `vars` to avoid
    # shadowing the builtin).
    extra_vars = 'hosts=%s,' % host_ip
    extra_vars += ' results_dir=%s' % results_dir
    cmd = 'ansible-playbook /src/stackbooks/run_tempest.yml --extra-vars '\
          '\"%s\" -i %s,' % (extra_vars, host_ip)
    # SECURITY NOTE: shell=True with an interpolated command string — only
    # trusted CI parameters may reach this function.
    ansible_proc = subprocess.Popen(cmd, shell=True,
                                    stdout=subprocess.PIPE)
    output = ansible_proc.communicate()[0]
    if ansible_proc.returncode == 0:
        return (True, output)
    else:
        return (False, output)
def gather_logs(cloud, server, upload_script,
                use_floating_ip=False, results_dir='/tmp'):
    """Gather up logs from a CI Run.

    Gathers logs including stack.log.out, tempest output etc.

    :param cloud (os-cloud-config obj): The cloud you're operating on
    :param server (shade object): Server to gather logs from
    :param upload_script (str): Path of bash script to execute that gathers
        up the logs
    :param use_floating_ip (Optional bool): By default we use the private
        IP of the Instance to communicate with it; set to True if you're
        running from a machine NOT in the cloud and need floating IP access.
    :param results_dir (Optional str): Location to dump log output from
        Ansible, default is /tmp

    Returns:
        (bool, str): True if succesful, False otherwise, and output from
        ansible playbook run.
    """
    host_ip = cloud.get_server_private_ip(server)
    if use_floating_ip:
        host_ip = cloud.get_server_public_ip(server)
    # Build the ansible --extra-vars payload (renamed from `vars` to avoid
    # shadowing the builtin).
    extra_vars = 'hosts=%s,' % host_ip
    extra_vars += ' results_dir=%s' % results_dir
    extra_vars += ' upload_script=%s' % upload_script
    extra_vars += ' instance_name=%s' % server.get('name')
    cmd = 'ansible-playbook /src/stackbooks/run_cleanup.yml --extra-vars '\
          '\"%s\" -i %s,' % (extra_vars, host_ip)
    # SECURITY NOTE: shell=True with an interpolated command string — only
    # trusted CI parameters may reach this function.
    ansible_proc = subprocess.Popen(cmd, shell=True,
                                    stdout=subprocess.PIPE)
    output = ansible_proc.communicate()[0]
    if ansible_proc.returncode == 0:
        return (True, output)
    else:
        return (False, output)
def publish_results(web_server, publish_dir, local_results_dir):
    """Publish logs from a CI Run on a web server.

    DOC FIX: the previous docstring described parameters (cloud, server,
    upload_script) this function does not take; it now documents the real
    signature.

    :param web_server (str): Host (ansible inventory entry) that serves the
        published results
    :param publish_dir (str): Directory on the web server to publish into
    :param local_results_dir (str): Location of the collected logs on the
        host node

    Returns:
        str: raw stdout from the ansible playbook run (unlike the other
        helpers in this module, no success flag is returned).
    """
    # Build the ansible --extra-vars payload (renamed from `vars` to avoid
    # shadowing the builtin).
    extra_vars = 'hosts=%s,' % web_server
    extra_vars += ' results_dir=%s' % local_results_dir
    extra_vars += ' publish_dir=%s' % publish_dir
    cmd = 'ansible-playbook /src/stackbooks/publish.yml --extra-vars '\
          '\"%s\" -i %s,' % (extra_vars, web_server)
    # SECURITY NOTE: shell=True with an interpolated command string — only
    # trusted CI parameters may reach this function.
    ansible_proc = subprocess.Popen(cmd, shell=True,
                                    stdout=subprocess.PIPE)
    output = ansible_proc.communicate()[0]
    return output
| 37.276471
| 80
| 0.6451
| 856
| 6,337
| 4.651869
| 0.150701
| 0.040181
| 0.045706
| 0.021095
| 0.808137
| 0.793069
| 0.767454
| 0.73104
| 0.73104
| 0.704922
| 0
| 0.003437
| 0.265425
| 6,337
| 169
| 81
| 37.497041
| 0.851987
| 0.500552
| 0
| 0.709677
| 0
| 0
| 0.179851
| 0.044342
| 0
| 0
| 0
| 0
| 0
| 1
| 0.064516
| false
| 0
| 0.016129
| 0
| 0.193548
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3a19011d29beeed0cc8e99e644fb905212939265
| 6,907
|
py
|
Python
|
flow/db/sql/trigger.py
|
avivfaraj/money-flow
|
8fb548e747798fec9a19152ce491b991284b096e
|
[
"Apache-2.0"
] | null | null | null |
flow/db/sql/trigger.py
|
avivfaraj/money-flow
|
8fb548e747798fec9a19152ce491b991284b096e
|
[
"Apache-2.0"
] | null | null | null |
flow/db/sql/trigger.py
|
avivfaraj/money-flow
|
8fb548e747798fec9a19152ce491b991284b096e
|
[
"Apache-2.0"
] | null | null | null |
from sql.insert import execute_query
def triggers(conn):
    """Install the transaction triggers on the given database connection.

    Only the insert triggers are currently active; the delete/update
    trigger installers are kept below, commented out, for reference.
    """
    # delete_transaction_trigger(conn)
    # update_transaction_trigger(conn)
    insert_transaction_trigger(conn)
# def delete_transaction_trigger(conn):
# execute_query(conn = conn,
# query = """CREATE TRIGGER IF NOT EXISTS delete_transaction
# BEFORE DELETE
# ON transaction_details
# BEGIN
# INSERT INTO _Variables (diff) VALUES (CASE
# WHEN OLD.type = "Deposit"
# THEN -(OLD.quantity * OLD.price)
# ELSE OLD.quantity * OLD.price
# END);
# UPDATE transaction_history
# SET balance = balance + (SELECT diff FROM _Variables)
# WHERE trans_id IN (SELECT id FROM transaction_details WHERE account_id = OLD.account_id) AND trans_id > OLD.id;
# UPDATE bank
# SET balance = balance + (SELECT diff FROM _Variables)
# WHERE id = OLD.account_id;
# DELETE FROM _Variables WHERE id =1;
# END;
# """)
# def update_transaction_trigger(conn):
# execute_query(conn = conn,
# query = """
# CREATE TRIGGER IF NOT EXISTS update_transaction
# AFTER UPDATE
# ON transaction_details
# BEGIN
# INSERT INTO _Variables (diff) VALUES (CASE
# WHEN OLD.type = "Deposit" THEN
# CASE WHEN (NEW.quantity * NEW.price > OLD.quantity * OLD.price)
# THEN (NEW.quantity * NEW.price - OLD.quantity * OLD.price)
# ELSE -(OLD.quantity * OLD.price - NEW.quantity * NEW.price)
# END
# ELSE
# CASE WHEN (NEW.quantity * NEW.price > OLD.quantity * OLD.price)
# THEN -(NEW.quantity * NEW.price - OLD.quantity * OLD.price)
# ELSE (OLD.quantity * OLD.price - NEW.quantity * NEW.price)
# END
# END);
# UPDATE transaction_history
# SET
# balance = balance + (SELECT diff FROM _Variables),
# total = CASE OLD.type
# WHEN "Withdrawal"
# THEN -(NEW.quantity * NEW.price)
# ELSE (NEW.quantity * NEW.price)
# END
# WHERE trans_id = OLD.id;
# UPDATE transaction_history
# SET balance = balance + (SELECT diff FROM _Variables)
# WHERE trans_id IN (SELECT id FROM transaction_details WHERE account_id = OLD.account_id) AND trans_id > OLD.id;
# UPDATE bank
# SET balance = balance + (SELECT diff FROM _Variables)
# WHERE id = OLD.account_id;
# DELETE FROM _Variables WHERE id =1;
# END;
# """)
def insert_transaction_trigger(conn):
    """Install the AFTER INSERT triggers on ``trans_details``.

    Two triggers are created, one per transaction type:

    * ``new_withdrawal`` -- for a 'Withdrawal' row: subtracts the line total
      ``quantity * price - discount`` from the linked ``Bank`` balance and
      records a negative total plus the new balance in ``trans_history``.
    * ``new_deposit`` -- for a 'Deposit' row: adds the line total to the
      ``Bank`` balance and records a positive total in ``trans_history``.

    Both triggers stage the computed total and the bank account id in the
    ``_Variables`` scratch table and clear it before finishing.

    :param conn: open database connection handed to ``execute_query``.
    """
    # IF NOT EXISTS makes repeated setup calls idempotent; without it SQLite
    # raises "trigger new_withdrawal already exists" the second time this
    # function runs (the commented-out sibling triggers above already use it).
    execute_query(conn=conn,
                  query="""
        CREATE TRIGGER IF NOT EXISTS new_withdrawal
        AFTER INSERT
        ON trans_details
        WHEN new.type = 'Withdrawal'
        BEGIN
            -- Stage (total, bankID) for use by the statements below.
            INSERT INTO _Variables VALUES (
                (NEW.quantity * NEW.price - NEW.discount),
                (SELECT accountID FROM Payment WHERE paymentID = NEW.paymentID)
            );
            UPDATE Bank
            SET balance = balance - (SELECT total FROM _Variables)
            WHERE Bank.accountID = (SELECT bankID FROM _Variables);
            INSERT INTO trans_history (transID, total, balance)
            VALUES (
                NEW.transID,
                - (SELECT total FROM _Variables),
                (SELECT balance FROM Bank
                 WHERE bank.accountID = (SELECT bankID FROM _Variables))
            );
            -- Clear the scratch table so the next trigger firing starts clean.
            DELETE FROM _Variables;
        END;
        """)
    execute_query(conn=conn,
                  query="""
        CREATE TRIGGER IF NOT EXISTS new_deposit
        AFTER INSERT
        ON trans_details
        WHEN NEW.type = 'Deposit'
        BEGIN
            INSERT INTO _Variables VALUES (
                (NEW.quantity * NEW.price - NEW.discount),
                (SELECT accountID FROM Payment WHERE paymentID = NEW.paymentID)
            );
            UPDATE Bank
            SET balance = balance + (SELECT total FROM _Variables)
            WHERE Bank.accountID = (SELECT bankID FROM _Variables);
            INSERT INTO trans_history (transID, total, balance)
            VALUES (
                NEW.transID,
                (SELECT total FROM _Variables),
                (SELECT balance FROM Bank
                 WHERE bank.accountID = (SELECT bankID FROM _Variables))
            );
            DELETE FROM _Variables;
        END;""")
| 38.586592
| 127
| 0.363399
| 462
| 6,907
| 5.294372
| 0.123377
| 0.090352
| 0.057236
| 0.077678
| 0.883483
| 0.86018
| 0.86018
| 0.86018
| 0.812756
| 0.795585
| 0
| 0.000695
| 0.583466
| 6,907
| 178
| 128
| 38.803371
| 0.849496
| 0.411467
| 0
| 0.701031
| 0
| 0
| 0.916479
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020619
| false
| 0
| 0.010309
| 0
| 0.030928
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
28bd484f4bc6d570763a0288eee8dc7d2fae9e6f
| 405
|
py
|
Python
|
temboo/core/Library/CloudMine/FileStorage/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 7
|
2016-03-07T02:07:21.000Z
|
2022-01-21T02:22:41.000Z
|
temboo/core/Library/CloudMine/FileStorage/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | null | null | null |
temboo/core/Library/CloudMine/FileStorage/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 8
|
2016-06-14T06:01:11.000Z
|
2020-04-22T09:21:44.000Z
|
from temboo.Library.CloudMine.FileStorage.DeleteFile import DeleteFile, DeleteFileInputSet, DeleteFileResultSet, DeleteFileChoreographyExecution
from temboo.Library.CloudMine.FileStorage.GetFile import GetFile, GetFileInputSet, GetFileResultSet, GetFileChoreographyExecution
from temboo.Library.CloudMine.FileStorage.SetFile import SetFile, SetFileInputSet, SetFileResultSet, SetFileChoreographyExecution
| 101.25
| 144
| 0.896296
| 33
| 405
| 11
| 0.545455
| 0.082645
| 0.140496
| 0.214876
| 0.305785
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.051852
| 405
| 3
| 145
| 135
| 0.945313
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
28c07a1aab05636bc51d85f4131b595adcf5295a
| 1,221
|
py
|
Python
|
r3c0nutils/user_agent.py
|
markgacoka/r3c0n
|
ac64614d10d176b9de2170ce8758a6aa75d75f54
|
[
"MIT"
] | 4
|
2022-03-06T16:42:23.000Z
|
2022-03-09T02:29:08.000Z
|
r3c0nutils/user_agent.py
|
markgacoka/r3c0n
|
ac64614d10d176b9de2170ce8758a6aa75d75f54
|
[
"MIT"
] | null | null | null |
r3c0nutils/user_agent.py
|
markgacoka/r3c0n
|
ac64614d10d176b9de2170ce8758a6aa75d75f54
|
[
"MIT"
] | 1
|
2022-03-07T03:37:51.000Z
|
2022-03-07T03:37:51.000Z
|
import random
def GET_UA():
    """Pick one desktop browser User-Agent string uniformly at random."""
    desktop_agents = (
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.111 Safari/537.36",
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1500.72 Safari/537.36",
        "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10) AppleWebKit/600.1.25 (KHTML, like Gecko) Version/8.0 Safari/600.1.25",
        "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:33.0) Gecko/20100101 Firefox/33.0",
        "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.111 Safari/537.36",
        "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.111 Safari/537.36",
        "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/600.1.17 (KHTML, like Gecko) Version/7.1 Safari/537.85.10",
        "Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko",
        "Mozilla/5.0 (Windows NT 6.3; WOW64; rv:33.0) Gecko/20100101 Firefox/33.0",
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.104 Safari/537.36",
    )
    return random.choice(desktop_agents)
| 71.823529
| 133
| 0.659296
| 220
| 1,221
| 3.631818
| 0.236364
| 0.100125
| 0.112641
| 0.140175
| 0.737171
| 0.737171
| 0.737171
| 0.737171
| 0.654568
| 0.654568
| 0
| 0.220896
| 0.176904
| 1,221
| 17
| 134
| 71.823529
| 0.574129
| 0
| 0
| 0
| 0
| 0.666667
| 0.816694
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0
| 0.066667
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e92dd878e67c9ef0864b50a0e29bc7e795bab768
| 105
|
py
|
Python
|
core/sync/manager.py
|
yuwilbur/birthday29
|
7a2c8069639b27b20bc0903d2cf6c212b398b4d9
|
[
"MIT"
] | null | null | null |
core/sync/manager.py
|
yuwilbur/birthday29
|
7a2c8069639b27b20bc0903d2cf6c212b398b4d9
|
[
"MIT"
] | null | null | null |
core/sync/manager.py
|
yuwilbur/birthday29
|
7a2c8069639b27b20bc0903d2cf6c212b398b4d9
|
[
"MIT"
] | null | null | null |
class Manager(object):
    """No-op lifecycle manager.

    Provides the setup/update/stop hooks of the sync framework; every hook
    does nothing so subclasses only override the phases they need.
    """

    def setup(self):
        """One-time initialisation hook; default implementation is a no-op."""
        return None

    def update(self):
        """Per-tick work hook; default implementation is a no-op."""
        return None

    def stop(self):
        """Shutdown hook; default implementation is a no-op."""
        return None
| 11.666667
| 22
| 0.685714
| 15
| 105
| 4.8
| 0.6
| 0.416667
| 0.361111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 105
| 9
| 23
| 11.666667
| 0.857143
| 0
| 0
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0
| 0
| 0.428571
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
3a6f8e589ff6cc6d7f402421acc3296fa52f91bc
| 14,842
|
py
|
Python
|
fibonacci21decomp.py
|
gavin4d/Fibonacci-Magic
|
3e5c57e6ac6a190e5e9e6d62e34d2d8621ef47cc
|
[
"CC0-1.0"
] | 1
|
2021-12-28T19:10:58.000Z
|
2021-12-28T19:10:58.000Z
|
fibonacci21decomp.py
|
gavin4d/Fibonacci-Magic
|
3e5c57e6ac6a190e5e9e6d62e34d2d8621ef47cc
|
[
"CC0-1.0"
] | null | null | null |
fibonacci21decomp.py
|
gavin4d/Fibonacci-Magic
|
3e5c57e6ac6a190e5e9e6d62e34d2d8621ef47cc
|
[
"CC0-1.0"
] | null | null | null |
from PIL.Image import FASTOCTREE
from manim import *
from functions import *
import color
def moveEquation(equations, loop, baseText, t1, t2, t3, t4, self):
    """Snapshot the equation currently shown under the dots and animate it
    into row ``loop`` of the finished-equations column on the left.

    ``t2``/``t4`` are arranged in the opposite vertical order to ``t1``/``t3``,
    hence the mirrored ``7 - loop`` index.  Returns ``loop + 1`` so the caller
    can advance its row counter in one assignment.
    """
    mirror = 7 - loop
    row = equations[loop]
    for piece in (baseText, t1[loop], t2[mirror], t3[loop], t4[mirror]):
        row.add(piece.copy())
    self.play(row.animate.shift(LEFT * 6 + UP * (4.75 - 0.5 * loop)))
    return loop + 1
class DecompDot(Scene):
    # Animates the decomposition of 21 dots into products of Fibonacci
    # numbers: each round regroups and recolours the dot column while the
    # factor columns scroll, and the resulting equation is moved to the left.
    def construct(self):
        # Row index of the next finished equation (advanced by moveEquation).
        loop = 0
        # Fibonacci numbers F(0)..F(8); fibo[8] == 21 is the dot count.
        fibo = [0,1,1,2,3,5,8,13,21]
        self.camera.background_color = color.BACKGROUND
        # 21 red dots stacked vertically on the right side of the frame.
        dots = [Dot().set_color(color.RED).move_to(UP * 0.25 * (10-i) + RIGHT * 3) for i in range(0,21)]
        # "× + ×" template that the scrolling factor columns flank.
        baseText = Text('× + ×').scale(0.5).set_color(BLACK).move_to(DOWN * 3 + RIGHT * 3)
        name = Text('Fibonacci Decomposition').set_color(BLACK).move_to(UP * 3)
        # Four factor columns: t1/t3 hold descending Fibonacci values,
        # t2/t4 the same values in ascending order (mirrored).
        t1 = VGroup()
        t2 = VGroup()
        t3 = VGroup()
        t4 = VGroup()
        for n in range(0,8):
            t1.add(Text(str(fibo[8-n])).set_color(color.RED))
            t2.add(Text(str(fibo[8-n])).set_color(color.RED))
            t3.add(Text(str(fibo[8-n-1])).set_color(color.BLUE))
            t4.add(Text(str(fibo[8-n-1])).set_color(color.BLUE))
        t1.scale(0.5).arrange(DOWN).move_to(LEFT * (1.15 - 3) + DOWN * (1.75 + 3))
        t2.scale(0.5).arrange(DOWN).move_to(LEFT * (0.4 - 3) + UP * (1.75 - 3))
        t3.scale(0.5).arrange(DOWN).move_to(RIGHT * (0.4 + 3) + DOWN * (1.75 + 3))
        t4.scale(0.5).arrange(DOWN).move_to(RIGHT * (1.15 + 3) + UP * (1.75 - 3))
        self.add(t1, t2, t3, t4)
        # Background-coloured squares that mask the factor columns where
        # they scroll outside the yellow viewing window.
        numberhidebox1 = Square().scale(2).move_to(UP * (2.25 - 3) + RIGHT * 3.5)
        numberhidebox1.set_fill(color.BACKGROUND, opacity=1).set_color(color.BACKGROUND)
        numberhidebox2 = Square().scale(2).move_to(DOWN * 5.25 + RIGHT * 3.5)
        numberhidebox2.set_fill(color.BACKGROUND, opacity=1).set_color(color.BACKGROUND)
        self.add(numberhidebox1, numberhidebox2)
        # Yellow frame that collects the finished equations on the left.
        decompView = Rectangle(color=color.YELLOW, width=3.5, height=4.5).move_to(LEFT * 3)
        equations = [VGroup() for i in range(0,8)]
        self.play(FadeIn(decompView), FadeIn(baseText), FadeIn(t1), FadeIn(t2), FadeIn(t3), FadeIn(t4), *[GrowFromCenter(dots[i]) for i in range(0,21)])
        self.wait(1)
        # Each round below: archive the current equation, swap dot membership
        # between the blue (group1) and red (group2) groups, shift the dots,
        # and scroll all four factor columns by half a unit.
        loop = moveEquation(equations,loop,baseText,t1,t2,t3,t4,self)
        group1 = VGroup()
        group1.add(*[dots[i] for i in range(0,8)])
        group2 = VGroup()
        group2.add(*[dots[i] for i in range(8,21)])
        self.play(group1.animate.set_color(color.BLUE).shift(RIGHT * 0.125), group2.animate.shift(LEFT * 0.125), t1.animate.shift(UP * 0.5), t2.animate.shift(DOWN * 0.5), t3.animate.shift(UP * 0.5), t4.animate.shift(DOWN * 0.5))
        self.play(group1.animate.shift(DOWN * 0.25 * 13))
        loop = moveEquation(equations,loop,baseText,t1,t2,t3,t4,self)
        group1.remove(*[dots[i] for i in range(0,8)])
        group2.add(*[dots[i] for i in range(0,8)])
        group2.remove(*[dots[i] for i in range(8,13)])
        group1.add(*[dots[i] for i in range(8,13)])
        self.play(group1.animate.set_color(color.BLUE).shift(RIGHT * 0.125 * 3), group2.animate.set_color(color.RED).shift(LEFT * 0.125), t1.animate.shift(UP * 0.5), t2.animate.shift(DOWN * 0.5), t3.animate.shift(UP * 0.5), t4.animate.shift(DOWN * 0.5))
        self.play(group1.animate.shift(DOWN * 0.25 * 8))
        loop = moveEquation(equations,loop,baseText,t1,t2,t3,t4,self)
        group1.remove(*[dots[i] for i in range(8,13)])
        group2.add(*[dots[i] for i in range(8,13)])
        group2.remove(*[dots[i] for i in [0,1,2,13,14,15]])
        group1.add(*[dots[i] for i in [0,1,2,13,14,15]])
        self.play(group1.animate.set_color(color.BLUE).shift(RIGHT * 0.125 * 4), group2.animate.set_color(color.RED).shift(LEFT * 0.25), t1.animate.shift(UP * 0.5), t2.animate.shift(DOWN * 0.5), t3.animate.shift(UP * 0.5), t4.animate.shift(DOWN * 0.5))
        self.play(group1.animate.shift(DOWN * 0.25 * 5))
        loop = moveEquation(equations,loop,baseText,t1,t2,t3,t4,self)
        group1.remove(*[dots[i] for i in [0,1,2,13,14,15]])
        group2.add(*[dots[i] for i in [0,1,2,13,14,15]])
        group2.remove(*[dots[i] for i in [16,17,3,4,8,9]])
        group1.add(*[dots[i] for i in [16,17,3,4,8,9]])
        self.play(group1.animate.set_color(color.BLUE).shift(RIGHT * 0.125 * 7), group2.animate.set_color(color.RED).shift(LEFT * 0.125 * 3), t1.animate.shift(UP * 0.5), t2.animate.shift(DOWN * 0.5), t3.animate.shift(UP * 0.5), t4.animate.shift(DOWN * 0.5))
        self.play(group1.animate.shift(DOWN * 0.25 * 3))
        loop = moveEquation(equations,loop,baseText,t1,t2,t3,t4,self)
        group1.remove(*[dots[i] for i in [16,17,3,4,8,9]])
        group2.add(*[dots[i] for i in [16,17,3,4,8,9]])
        group2.remove(*[dots[i] for i in [18,5,10,0,13]])
        group1.add(*[dots[i] for i in [18,5,10,0,13]])
        self.play(group1.animate.set_color(color.BLUE).shift(RIGHT * 0.125 * 11), group2.animate.set_color(color.RED).shift(LEFT * 0.125 * 5), t1.animate.shift(UP * 0.5), t2.animate.shift(DOWN * 0.5), t3.animate.shift(UP * 0.5), t4.animate.shift(DOWN * 0.5))
        self.play(group1.animate.shift(DOWN * 0.25 * 2))
        loop = moveEquation(equations,loop,baseText,t1,t2,t3,t4,self)
        group1.remove(*[dots[i] for i in [18,5,10,0,13]])
        group2.add(*[dots[i] for i in [18,5,10,0,13]])
        group2.remove(*[dots[i] for i in [19,6,11,1,14,16,3,8]])
        group1.add(*[dots[i] for i in [19,6,11,1,14,16,3,8]])
        self.play(group1.animate.set_color(color.BLUE).shift(RIGHT * 0.125 * 18), group2.animate.set_color(color.RED).shift(LEFT * 0.125 * 8), t1.animate.shift(UP * 0.5), t2.animate.shift(DOWN * 0.5), t3.animate.shift(UP * 0.5), t4.animate.shift(DOWN * 0.5))
        self.play(group1.animate.shift(DOWN * 0.25))
        loop = moveEquation(equations,loop,baseText,t1,t2,t3,t4,self)
        group1.remove(*[dots[i] for i in [19,6,11,1,14,16,3,8]])
        group2.add(*[dots[i] for i in [19,6,11,1,14,16,3,8]])
        self.play(group2.animate.set_color(color.RED), t1.animate.shift(UP * 0.5), t2.animate.shift(DOWN * 0.5), t3.animate.shift(UP * 0.5), t4.animate.shift(DOWN * 0.5))
        loop = moveEquation(equations,loop,baseText,t1,t2,t3,t4,self)
        self.play(FadeIn(name))
        self.wait(3)
        # Clean up: fade the working area, slide the finished equations (and
        # their frame) to the centre, then drop the "× + ×" templates.
        self.play(FadeOut(baseText, t1[7], t2[0], t3[7], t4[0], *dots, name))
        self.play(*[equations[i].animate.shift(RIGHT * 3) for i in range(0,8)], decompView.animate.shift(RIGHT * 3))
        self.play(FadeOut(*[equations[i][0] for i in range(0,8)]))
class DecompDotLongEnd (Scene):
    # Same animation as DecompDot, but with a longer hold on the title
    # (11 s instead of 3 s) and a fade-to-background ending instead of
    # sliding the equations to the centre.
    def construct(self):
        # Row index of the next finished equation (advanced by moveEquation).
        loop = 0
        # Fibonacci numbers F(0)..F(8); fibo[8] == 21 is the dot count.
        fibo = [0,1,1,2,3,5,8,13,21]
        self.camera.background_color = color.BACKGROUND
        # 21 red dots stacked vertically on the right side of the frame.
        dots = [Dot().set_color(color.RED).move_to(UP * 0.25 * (10-i) + RIGHT * 3) for i in range(0,21)]
        baseText = Text('× + ×').scale(0.5).set_color(BLACK).move_to(DOWN * 3 + RIGHT * 3)
        name = Text('Fibonacci Decomposition').set_color(BLACK).move_to(UP * 3)
        # Four factor columns: t1/t3 descending values, t2/t4 ascending.
        t1 = VGroup()
        t2 = VGroup()
        t3 = VGroup()
        t4 = VGroup()
        for n in range(0,8):
            t1.add(Text(str(fibo[8-n])).set_color(color.RED))
            t2.add(Text(str(fibo[8-n])).set_color(color.RED))
            t3.add(Text(str(fibo[8-n-1])).set_color(color.BLUE))
            t4.add(Text(str(fibo[8-n-1])).set_color(color.BLUE))
        t1.scale(0.5).arrange(DOWN).move_to(LEFT * (1.15 - 3) + DOWN * (1.75 + 3))
        t2.scale(0.5).arrange(DOWN).move_to(LEFT * (0.4 - 3) + UP * (1.75 - 3))
        t3.scale(0.5).arrange(DOWN).move_to(RIGHT * (0.4 + 3) + DOWN * (1.75 + 3))
        t4.scale(0.5).arrange(DOWN).move_to(RIGHT * (1.15 + 3) + UP * (1.75 - 3))
        self.add(t1, t2, t3, t4)
        # Masks hiding the factor columns outside the viewing window.
        numberhidebox1 = Square().scale(2).move_to(UP * (2.25 - 3) + RIGHT * 3.5)
        numberhidebox1.set_fill(color.BACKGROUND, opacity=1).set_color(color.BACKGROUND)
        numberhidebox2 = Square().scale(2).move_to(DOWN * 5.25 + RIGHT * 3.5)
        numberhidebox2.set_fill(color.BACKGROUND, opacity=1).set_color(color.BACKGROUND)
        self.add(numberhidebox1, numberhidebox2)
        decompView = Rectangle(color=color.YELLOW, width=3.5, height=4.5).move_to(LEFT * 3)
        equations = [VGroup() for i in range(0,8)]
        self.play(FadeIn(decompView), FadeIn(baseText), FadeIn(t1), FadeIn(t2), FadeIn(t3), FadeIn(t4), *[GrowFromCenter(dots[i]) for i in range(0,21)])
        self.wait(1)
        # Rounds identical to DecompDot.construct: archive the equation,
        # regroup/recolour the dots, scroll the factor columns.
        loop = moveEquation(equations,loop,baseText,t1,t2,t3,t4,self)
        group1 = VGroup()
        group1.add(*[dots[i] for i in range(0,8)])
        group2 = VGroup()
        group2.add(*[dots[i] for i in range(8,21)])
        self.play(group1.animate.set_color(color.BLUE).shift(RIGHT * 0.125), group2.animate.shift(LEFT * 0.125), t1.animate.shift(UP * 0.5), t2.animate.shift(DOWN * 0.5), t3.animate.shift(UP * 0.5), t4.animate.shift(DOWN * 0.5))
        self.play(group1.animate.shift(DOWN * 0.25 * 13))
        loop = moveEquation(equations,loop,baseText,t1,t2,t3,t4,self)
        group1.remove(*[dots[i] for i in range(0,8)])
        group2.add(*[dots[i] for i in range(0,8)])
        group2.remove(*[dots[i] for i in range(8,13)])
        group1.add(*[dots[i] for i in range(8,13)])
        self.play(group1.animate.set_color(color.BLUE).shift(RIGHT * 0.125 * 3), group2.animate.set_color(color.RED).shift(LEFT * 0.125), t1.animate.shift(UP * 0.5), t2.animate.shift(DOWN * 0.5), t3.animate.shift(UP * 0.5), t4.animate.shift(DOWN * 0.5))
        self.play(group1.animate.shift(DOWN * 0.25 * 8))
        loop = moveEquation(equations,loop,baseText,t1,t2,t3,t4,self)
        group1.remove(*[dots[i] for i in range(8,13)])
        group2.add(*[dots[i] for i in range(8,13)])
        group2.remove(*[dots[i] for i in [0,1,2,13,14,15]])
        group1.add(*[dots[i] for i in [0,1,2,13,14,15]])
        self.play(group1.animate.set_color(color.BLUE).shift(RIGHT * 0.125 * 4), group2.animate.set_color(color.RED).shift(LEFT * 0.25), t1.animate.shift(UP * 0.5), t2.animate.shift(DOWN * 0.5), t3.animate.shift(UP * 0.5), t4.animate.shift(DOWN * 0.5))
        self.play(group1.animate.shift(DOWN * 0.25 * 5))
        loop = moveEquation(equations,loop,baseText,t1,t2,t3,t4,self)
        group1.remove(*[dots[i] for i in [0,1,2,13,14,15]])
        group2.add(*[dots[i] for i in [0,1,2,13,14,15]])
        group2.remove(*[dots[i] for i in [16,17,3,4,8,9]])
        group1.add(*[dots[i] for i in [16,17,3,4,8,9]])
        self.play(group1.animate.set_color(color.BLUE).shift(RIGHT * 0.125 * 7), group2.animate.set_color(color.RED).shift(LEFT * 0.125 * 3), t1.animate.shift(UP * 0.5), t2.animate.shift(DOWN * 0.5), t3.animate.shift(UP * 0.5), t4.animate.shift(DOWN * 0.5))
        self.play(group1.animate.shift(DOWN * 0.25 * 3))
        loop = moveEquation(equations,loop,baseText,t1,t2,t3,t4,self)
        group1.remove(*[dots[i] for i in [16,17,3,4,8,9]])
        group2.add(*[dots[i] for i in [16,17,3,4,8,9]])
        group2.remove(*[dots[i] for i in [18,5,10,0,13]])
        group1.add(*[dots[i] for i in [18,5,10,0,13]])
        self.play(group1.animate.set_color(color.BLUE).shift(RIGHT * 0.125 * 11), group2.animate.set_color(color.RED).shift(LEFT * 0.125 * 5), t1.animate.shift(UP * 0.5), t2.animate.shift(DOWN * 0.5), t3.animate.shift(UP * 0.5), t4.animate.shift(DOWN * 0.5))
        self.play(group1.animate.shift(DOWN * 0.25 * 2))
        loop = moveEquation(equations,loop,baseText,t1,t2,t3,t4,self)
        group1.remove(*[dots[i] for i in [18,5,10,0,13]])
        group2.add(*[dots[i] for i in [18,5,10,0,13]])
        group2.remove(*[dots[i] for i in [19,6,11,1,14,16,3,8]])
        group1.add(*[dots[i] for i in [19,6,11,1,14,16,3,8]])
        self.play(group1.animate.set_color(color.BLUE).shift(RIGHT * 0.125 * 18), group2.animate.set_color(color.RED).shift(LEFT * 0.125 * 8), t1.animate.shift(UP * 0.5), t2.animate.shift(DOWN * 0.5), t3.animate.shift(UP * 0.5), t4.animate.shift(DOWN * 0.5))
        self.play(group1.animate.shift(DOWN * 0.25))
        loop = moveEquation(equations,loop,baseText,t1,t2,t3,t4,self)
        group1.remove(*[dots[i] for i in [19,6,11,1,14,16,3,8]])
        group2.add(*[dots[i] for i in [19,6,11,1,14,16,3,8]])
        self.play(group2.animate.set_color(color.RED), t1.animate.shift(UP * 0.5), t2.animate.shift(DOWN * 0.5), t3.animate.shift(UP * 0.5), t4.animate.shift(DOWN * 0.5))
        loop = moveEquation(equations,loop,baseText,t1,t2,t3,t4,self)
        self.play(FadeIn(name))
        # Long hold on the title, then fade everything to the background
        # colour with an oversized opaque square.
        self.wait(11)
        self.play(FadeIn(Square().scale(10).set_fill(color.BACKGROUND).set_opacity(1)))
class Decomp(Scene):
    # Scrolls the extended (negative-index) Fibonacci sequence across the top
    # while the "a × b + c × d = 21" decomposition columns scroll inside a
    # framed view; ends with a fade to the background colour.
    def construct(self):
        # Extended Fibonacci values F(-18)..F(18) from the project helper.
        fibo = fiboarray_extended(-18, 18)
        self.camera.background_color = color.BACKGROUND
        # Horizontal strip of all 35 Fibonacci values, starting off-screen left.
        fibonacci = VGroup(*[Text(str(fibo[i])).set_color(BLACK) for i in range(0, 35)]).arrange(RIGHT * 4).move_to(UP * 2.5 + LEFT * 10)
        baseText = Text('× + × = 21').scale(0.5).set_color(BLACK).move_to(RIGHT * 1.075)
        decompView = Rectangle(color=color.YELLOW, width=3.5, height=4.5).move_to(ORIGIN)
        # Four factor columns: t1/t3 walk the sequence downward from index 7/8,
        # t2/t4 walk it upward from index 0/1 (mirrored pairings).
        t1 = VGroup()
        t2 = VGroup()
        t3 = VGroup()
        t4 = VGroup()
        for n in range(0,35):
            t1.add(Text(str(fibo[-n + 7 + 1])).set_color(color.RED))
            t2.add(Text(str(fibo[n+1])).set_color(color.RED))
            t3.add(Text(str(fibo[-n + 7])).set_color(color.BLUE))
            t4.add(Text(str(fibo[n])).set_color(color.BLUE))
        t1.scale(0.5).arrange(DOWN).move_to(LEFT * (1.15))
        t2.scale(0.5).arrange(DOWN).move_to(LEFT * (0.4))
        t3.scale(0.5).arrange(DOWN).move_to(RIGHT * (0.4))
        t4.scale(0.5).arrange(DOWN).move_to(RIGHT * (1.15))
        numbers = VGroup(t1, t2, t3, t4)
        numbers.shift(UP * 2.25)
        # Opaque masks above and below the frame so the columns appear to
        # scroll through a window.
        numberhideboxes = VGroup(Square().scale(2).move_to(UP * (4)).set_fill(color.BACKGROUND, opacity=1).set_color(color.BACKGROUND), Square().scale(2).move_to(DOWN * 4).set_fill(color.BACKGROUND, opacity=1).set_color(color.BACKGROUND))
        self.add(numbers, numberhideboxes, decompView)
        self.wait(1.8)
        # Make room at the top, then sweep the Fibonacci strip across it.
        self.play(numbers.animate.shift(DOWN), decompView.animate.shift(DOWN), numberhideboxes.animate.shift(DOWN), FadeIn(fibonacci))
        self.wait(1)
        self.play(fibonacci.animate.shift(RIGHT * 14), run_time=5)
        self.wait(3)
        # Enlarge the view, reveal the "= 21" equation template, then scroll
        # the factor columns seven half-steps through it.
        self.play(FadeOut(fibonacci), numbers.animate.shift(UP * 0.75), decompView.animate.shift(UP).stretch_to_fit_height(6), numberhideboxes[0].animate.shift(UP * 2), Write(baseText))
        self.wait(1)
        for i in range(0,7):
            self.play(numbers.animate.shift(UP * 0.5), run_time=0.75)
        self.wait(2)
        self.play(FadeIn(Square().scale(10).set_fill(color.BACKGROUND).set_opacity(1)))
| 50.482993
| 258
| 0.607667
| 2,543
| 14,842
| 3.510421
| 0.043649
| 0.108883
| 0.038983
| 0.050409
| 0.902207
| 0.889885
| 0.872634
| 0.870617
| 0.867705
| 0.851798
| 0
| 0.101258
| 0.196874
| 14,842
| 293
| 259
| 50.65529
| 0.647148
| 0
| 0
| 0.811594
| 0
| 0
| 0.006872
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019324
| false
| 0
| 0.019324
| 0
| 0.057971
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3a7c302c456d49a44fb4eec66216a8f383187208
| 2,277
|
py
|
Python
|
conditional_statements_advanced/lab/ski_trip.py
|
PetkoAndreev/Python-basics
|
a376362548380ae50c7c707551cb821547f44402
|
[
"MIT"
] | null | null | null |
conditional_statements_advanced/lab/ski_trip.py
|
PetkoAndreev/Python-basics
|
a376362548380ae50c7c707551cb821547f44402
|
[
"MIT"
] | null | null | null |
conditional_statements_advanced/lab/ski_trip.py
|
PetkoAndreev/Python-basics
|
a376362548380ae50c7c707551cb821547f44402
|
[
"MIT"
] | null | null | null |
# Ski trip price calculator: reads the number of days, the room type and the
# review grade, then prints the total price formatted to two decimals.
days = int(input())
type_room = input()
grade = input()

# The first day is arrival, so only (days - 1) nights are billed.
nights = days - 1

# Price per night for each recognised room type.
# NOTE: the original code billed 'president apartment' at 25/night for
# 10-15-night stays (everywhere else it used 35/night) — fixed to 35 here.
_RATES = {
    'room for one person': 18,
    'apartment': 25,
    'president apartment': 35,
}


def _stay_discount(room, night_count):
    """Return the length-of-stay discount fraction for the given room type."""
    if room == 'apartment':
        if night_count < 10:
            return 0.30
        if night_count <= 15:
            return 0.35
        return 0.50
    if room == 'president apartment':
        if night_count < 10:
            return 0.10
        if night_count <= 15:
            return 0.15
        return 0.20
    # 'room for one person' gets no length-of-stay discount.
    return 0.0


# As in the original, unrecognised room types or grades print nothing.
if type_room in _RATES and grade in ('positive', 'negative'):
    price = nights * _RATES[type_room]
    price -= price * _stay_discount(type_room, nights)
    # Review grade adjustment: positive adds 25%, negative removes 10%.
    if grade == 'positive':
        price += price * 0.25
    else:
        price -= price * 0.1
    print(f'{price:.2f}')
| 29.960526
| 64
| 0.498902
| 303
| 2,277
| 3.726073
| 0.10231
| 0.230292
| 0.253322
| 0.161205
| 0.943313
| 0.943313
| 0.920283
| 0.920283
| 0.858282
| 0.829938
| 0
| 0.093055
| 0.348704
| 2,277
| 76
| 65
| 29.960526
| 0.66824
| 0
| 0
| 0.868421
| 0
| 0
| 0.129939
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.184211
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
3af5e262165efd28e4fad979b4ccd87a58302c19
| 1,080
|
py
|
Python
|
samples/cli/accelbyte_py_sdk_cli/gametelemetry/__init__.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | null | null | null |
samples/cli/accelbyte_py_sdk_cli/gametelemetry/__init__.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | 1
|
2021-10-13T03:46:58.000Z
|
2021-10-13T03:46:58.000Z
|
samples/cli/accelbyte_py_sdk_cli/gametelemetry/__init__.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2021 AccelByte Inc. All Rights Reserved.
# This is licensed software from AccelByte Inc, for limitations
# and restrictions contact your company contract manager.
#
# Code generated. DO NOT EDIT!
# template_file: python-cli-init.j2
# Analytics Game Telemetry (0.0.1)
from ._protected_save_events_game_telemetry_v1_protected_events_post import protected_save_events_game_telemetry_v1_protected_events_post
from ._protected_get_playtime_game_telemetry_v1_protected_steam_ids_steam_id_playtime_get import protected_get_playtime_game_telemetry_v1_protected_steam_ids_steam_id_playtime_get
from ._protected_update_playtime_game_telemetry_v1_protected_steam_ids_steam_id_playtime_playtime_put import protected_update_playtime_game_telemetry_v1_protected_steam_ids_steam_id_playtime_playtime_put
# Registry of every CLI command in the gametelemetry group, consumed by the
# CLI loader.  (This file is code-generated — see the header; do not edit.)
commands = [
    protected_save_events_game_telemetry_v1_protected_events_post,
    protected_get_playtime_game_telemetry_v1_protected_steam_ids_steam_id_playtime_get,
    protected_update_playtime_game_telemetry_v1_protected_steam_ids_steam_id_playtime_playtime_put,
]
| 51.428571
| 203
| 0.898148
| 156
| 1,080
| 5.557692
| 0.333333
| 0.149942
| 0.155709
| 0.249135
| 0.705882
| 0.705882
| 0.705882
| 0.705882
| 0.705882
| 0.522491
| 0
| 0.016966
| 0.072222
| 1,080
| 20
| 204
| 54
| 0.848303
| 0.248148
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.375
| 0
| 0.375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
c91beedc3966455f3bf068ecff78fc2b77740064
| 246
|
py
|
Python
|
ddganAE/architectures/__init__.py
|
Zeff020/Adversarial_ROM
|
8c9e7ff86250e9370e5fdd2018f9ad04ded5f122
|
[
"MIT"
] | 1
|
2021-12-27T06:14:32.000Z
|
2021-12-27T06:14:32.000Z
|
ddganAE/architectures/__init__.py
|
Zeff020/Adversarial_ROM
|
8c9e7ff86250e9370e5fdd2018f9ad04ded5f122
|
[
"MIT"
] | null | null | null |
ddganAE/architectures/__init__.py
|
Zeff020/Adversarial_ROM
|
8c9e7ff86250e9370e5fdd2018f9ad04ded5f122
|
[
"MIT"
] | 3
|
2021-08-05T11:17:37.000Z
|
2021-09-02T02:37:44.000Z
|
# Do these imports such that user can import all architectures at once
from .cae.D2 import * # noqa: F403, F401
from .cae.D3 import * # noqa: F403, F401
from .svdae import * # noqa: F403, F401
from .discriminators import * # noqa: F403, F401
| 41
| 70
| 0.707317
| 38
| 246
| 4.578947
| 0.552632
| 0.229885
| 0.321839
| 0.413793
| 0.37931
| 0
| 0
| 0
| 0
| 0
| 0
| 0.13198
| 0.199187
| 246
| 5
| 71
| 49.2
| 0.751269
| 0.552846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c92356546714ccd0010ff5f5eadd8d31737fb68b
| 34,300
|
py
|
Python
|
magnum/tests/unit/api/controllers/v1/test_container.py
|
MatMaul/magnum
|
4d5fd80d89e38e98aff24f01b967a57d0adcd191
|
[
"Apache-2.0"
] | null | null | null |
magnum/tests/unit/api/controllers/v1/test_container.py
|
MatMaul/magnum
|
4d5fd80d89e38e98aff24f01b967a57d0adcd191
|
[
"Apache-2.0"
] | null | null | null |
magnum/tests/unit/api/controllers/v1/test_container.py
|
MatMaul/magnum
|
4d5fd80d89e38e98aff24f01b967a57d0adcd191
|
[
"Apache-2.0"
] | 1
|
2020-09-09T14:35:08.000Z
|
2020-09-09T14:35:08.000Z
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from mock import patch
from webtest.app import AppError
from magnum.common import utils as comm_utils
from magnum import objects
from magnum.objects import fields
from magnum.tests.unit.api import base as api_base
from magnum.tests.unit.db import utils
from magnum.tests.unit.objects import utils as obj_utils
class TestContainerController(api_base.FunctionalTest):
def setUp(self):
super(TestContainerController, self).setUp()
p = patch('magnum.objects.Bay.get_by_uuid')
self.mock_bay_get_by_uuid = p.start()
self.addCleanup(p.stop)
def fake_get_by_uuid(context, uuid):
bay_dict = utils.get_test_bay(uuid=uuid)
baymodel = obj_utils.get_test_baymodel(
context, coe='swarm', uuid=bay_dict['baymodel_id'])
bay = objects.Bay(self.context, **bay_dict)
bay.baymodel = baymodel
return bay
self.mock_bay_get_by_uuid.side_effect = fake_get_by_uuid
@patch('magnum.conductor.api.API.container_create')
def test_create_container(self, mock_container_create):
mock_container_create.side_effect = lambda x: x
params = ('{"name": "My Docker", "image": "ubuntu",'
'"command": "env", "memory": "512m",'
'"bay_uuid": "fff114da-3bfa-4a0f-a123-c0dffad9718e",'
'"environment": {"key1": "val1", "key2": "val2"}}')
response = self.app.post('/v1/containers',
params=params,
content_type='application/json')
self.assertEqual(201, response.status_int)
self.assertTrue(mock_container_create.called)
@patch('magnum.conductor.api.API.container_create')
def test_create_container_set_project_id_and_user_id(
self, mock_container_create):
def _create_side_effect(container):
self.assertEqual(self.context.project_id, container.project_id)
self.assertEqual(self.context.user_id, container.user_id)
return container
mock_container_create.side_effect = _create_side_effect
params = ('{"name": "My Docker", "image": "ubuntu",'
'"command": "env", "memory": "512m",'
'"bay_uuid": "fff114da-3bfa-4a0f-a123-c0dffad9718e",'
'"environment": {"key1": "val1", "key2": "val2"}}')
self.app.post('/v1/containers',
params=params,
content_type='application/json')
@patch('magnum.conductor.api.API.container_show')
@patch('magnum.conductor.api.API.container_create')
@patch('magnum.conductor.api.API.container_delete')
def test_create_container_with_command(self,
mock_container_delete,
mock_container_create,
mock_container_show):
mock_container_create.side_effect = lambda x: x
bay = obj_utils.create_test_bay(self.context)
# Create a container with a command
params = ('{"name": "My Docker", "image": "ubuntu",'
'"command": "env", "memory": "512m",'
'"bay_uuid": "%s",'
'"environment": {"key1": "val1", "key2": "val2"}}' %
bay.uuid)
response = self.app.post('/v1/containers',
params=params,
content_type='application/json')
self.assertEqual(201, response.status_int)
# get all containers
container = objects.Container.list(self.context)[0]
container.status = 'Stopped'
mock_container_show.return_value = container
response = self.app.get('/v1/containers')
self.assertEqual(200, response.status_int)
self.assertEqual(1, len(response.json))
c = response.json['containers'][0]
self.assertIsNotNone(c.get('uuid'))
self.assertEqual('My Docker', c.get('name'))
self.assertEqual('env', c.get('command'))
self.assertEqual('Stopped', c.get('status'))
self.assertEqual('512m', c.get('memory'))
self.assertEqual({"key1": "val1", "key2": "val2"},
c.get('environment'))
# Delete the container we created
response = self.app.delete('/v1/containers/%s' % c.get('uuid'))
self.assertEqual(204, response.status_int)
response = self.app.get('/v1/containers')
self.assertEqual(200, response.status_int)
c = response.json['containers']
self.assertEqual(0, len(c))
self.assertTrue(mock_container_create.called)
@patch('magnum.conductor.api.API.container_show')
@patch('magnum.conductor.api.API.container_create')
@patch('magnum.conductor.api.API.container_delete')
def test_create_container_with_bay_uuid(self,
                                        mock_container_delete,
                                        mock_container_create,
                                        mock_container_show):
    """Create/list/delete a container referencing a literal bay UUID."""
    mock_container_create.side_effect = lambda x: x
    # Create a container whose bay_uuid is given verbatim in the body.
    body = ('{"name": "My Docker", "image": "ubuntu",'
            '"command": "env", "memory": "512m",'
            '"bay_uuid": "fff114da-3bfa-4a0f-a123-c0dffad9718e",'
            '"environment": {"key1": "val1", "key2": "val2"}}')
    resp = self.app.post('/v1/containers',
                         params=body,
                         content_type='application/json')
    self.assertEqual(201, resp.status_int)
    # Fetch the persisted record; show() reports it as stopped.
    created = objects.Container.list(self.context)[0]
    created.status = 'Stopped'
    mock_container_show.return_value = created
    resp = self.app.get('/v1/containers')
    self.assertEqual(200, resp.status_int)
    self.assertEqual(1, len(resp.json))
    entry = resp.json['containers'][0]
    self.assertIsNotNone(entry.get('uuid'))
    self.assertEqual('My Docker', entry.get('name'))
    self.assertEqual('env', entry.get('command'))
    self.assertEqual('Stopped', entry.get('status'))
    self.assertEqual('512m', entry.get('memory'))
    self.assertEqual({"key1": "val1", "key2": "val2"},
                     entry.get('environment'))
    # Remove it again and check the listing is empty afterwards.
    resp = self.app.delete('/v1/containers/%s' % entry.get('uuid'))
    self.assertEqual(204, resp.status_int)
    resp = self.app.get('/v1/containers')
    self.assertEqual(200, resp.status_int)
    entry = resp.json['containers']
    self.assertEqual(0, len(entry))
    self.assertTrue(mock_container_create.called)
@patch('magnum.conductor.api.API.container_show')
@patch('magnum.conductor.api.API.container_create')
def test_create_container_without_memory(self,
                                         mock_container_create,
                                         mock_container_show):
    """A create request omitting 'memory' yields a container with none set."""
    mock_container_create.side_effect = lambda x: x
    bay = obj_utils.create_test_bay(self.context)
    # Build a request body that deliberately has no "memory" field.
    body = ('{"name": "My Docker", "image": "ubuntu",'
            '"command": "env",'
            '"bay_uuid": "%s",'
            '"environment": {"key1": "val1", "key2": "val2"}}' %
            bay.uuid)
    resp = self.app.post('/v1/containers',
                         params=body,
                         content_type='application/json')
    self.assertEqual(201, resp.status_int)
    # List and inspect the single container that was created.
    created = objects.Container.list(self.context)[0]
    created.status = 'Stopped'
    mock_container_show.return_value = created
    resp = self.app.get('/v1/containers')
    self.assertEqual(200, resp.status_int)
    self.assertEqual(1, len(resp.json))
    entry = resp.json['containers'][0]
    self.assertIsNotNone(entry.get('uuid'))
    self.assertEqual('My Docker', entry.get('name'))
    self.assertEqual('env', entry.get('command'))
    self.assertEqual('Stopped', entry.get('status'))
    self.assertIsNone(entry.get('memory'))
    self.assertEqual({"key1": "val1", "key2": "val2"},
                     entry.get('environment'))
@patch('magnum.conductor.api.API.container_show')
@patch('magnum.conductor.api.API.container_create')
def test_create_container_without_environment(self,
                                              mock_container_create,
                                              mock_container_show):
    """Omitting 'environment' defaults it to an empty mapping."""
    mock_container_create.side_effect = lambda x: x
    # Request body without any "environment" field.
    body = ('{"name": "My Docker", "image": "ubuntu",'
            '"command": "env", "memory": "512m",'
            '"bay_uuid": "fff114da-3bfa-4a0f-a123-c0dffad9718e"}')
    resp = self.app.post('/v1/containers',
                         params=body,
                         content_type='application/json')
    self.assertEqual(201, resp.status_int)
    # List and inspect the created container.
    created = objects.Container.list(self.context)[0]
    created.status = 'Stopped'
    mock_container_show.return_value = created
    resp = self.app.get('/v1/containers')
    self.assertEqual(200, resp.status_int)
    self.assertEqual(1, len(resp.json))
    entry = resp.json['containers'][0]
    self.assertIsNotNone(entry.get('uuid'))
    self.assertEqual('My Docker', entry.get('name'))
    self.assertEqual('env', entry.get('command'))
    self.assertEqual('Stopped', entry.get('status'))
    self.assertEqual('512m', entry.get('memory'))
    self.assertEqual({}, entry.get('environment'))
@patch('magnum.conductor.api.API.container_create')
def test_create_container_without_name(self, mock_container_create):
    """A POST lacking the required 'name' field is rejected and the
    conductor create RPC is never invoked.
    """
    # No name param
    params = ('{"image": "ubuntu", "command": "env", "memory": "512m",'
              '"bay_uuid": "fff114da-3bfa-4a0f-a123-c0dffad9718e",'
              '"environment": {"key1": "val1", "key2": "val2"}}')
    self.assertRaises(AppError, self.app.post, '/v1/containers',
                      params=params, content_type='application/json')
    # Bug fix: `assertTrue(mock.not_called)` always passed, because
    # reading `.not_called` on a Mock auto-creates a truthy child mock.
    self.assertFalse(mock_container_create.called)
@patch('magnum.conductor.api.API.container_create')
def _test_create_container_invalid_params(self, params,
                                          mock_container_create):
    """Helper: POST the given body, expect a client error, and verify the
    conductor create RPC was never reached.
    """
    self.assertRaises(AppError, self.app.post, '/v1/containers',
                      params=params, content_type='application/json')
    # Bug fix: `assertTrue(mock.not_called)` always passed, because
    # reading `.not_called` on a Mock auto-creates a truthy child mock.
    self.assertFalse(mock_container_create.called)
def test_create_container_invalid_long_name(self):
    """A 256-character name exceeds the field limit and must be rejected."""
    overlong = 'i' * 256
    body = ('{"name": "' + overlong + '", "image": "ubuntu",'
            '"command": "env", "memory": "512m",'
            '"bay_uuid": "fff114da-3bfa-4a0f-a123-c0dffad9718e"}')
    self._test_create_container_invalid_params(body)
def test_create_container_no_memory_unit(self):
    """A bare numeric memory value (missing its unit suffix) is invalid."""
    body = ('{"name": "ubuntu", "image": "ubuntu",'
            '"command": "env", "memory": "512",'
            '"bay_uuid": "fff114da-3bfa-4a0f-a123-c0dffad9718e"}')
    self._test_create_container_invalid_params(body)
def test_create_container_bad_memory_unit(self):
    """An unrecognized memory unit suffix ('S') is invalid."""
    body = ('{"name": "ubuntu", "image": "ubuntu",'
            '"command": "env", "memory": "512S",'
            '"bay_uuid": "fff114da-3bfa-4a0f-a123-c0dffad9718e"}')
    self._test_create_container_invalid_params(body)
@patch('magnum.conductor.api.API.container_show')
@patch('magnum.objects.Container.list')
def test_get_all_containers(self, mock_container_list,
                            mock_container_show):
    """GET /v1/containers lists containers with the default pagination."""
    test_container = utils.get_test_container()
    containers = [objects.Container(self.context, **test_container)]
    mock_container_list.return_value = containers
    mock_container_show.return_value = containers[0]
    response = self.app.get('/v1/containers')
    # Default query: limit=1000, no marker, sorted by id ascending,
    # no filters.
    mock_container_list.assert_called_once_with(mock.ANY,
                                                1000, None, 'id', 'asc',
                                                filters=None)
    self.assertEqual(200, response.status_int)
    actual_containers = response.json['containers']
    self.assertEqual(1, len(actual_containers))
    self.assertEqual(test_container['uuid'],
                     actual_containers[0].get('uuid'))
@patch('magnum.conductor.api.API.container_show')
@patch('magnum.objects.Container.list')
def test_get_all_containers_with_pagination_marker(self,
                                                   mock_container_list,
                                                   mock_container_show):
    """Listing with limit+marker returns only entries after the marker."""
    container_list = []
    # Create four containers; the third one's uuid is used as the marker.
    for id_ in range(4):
        test_container = utils.create_test_container(
            id=id_, uuid=comm_utils.generate_uuid())
        container_list.append(objects.Container(self.context,
                                                **test_container))
    # Container.list is mocked to return just the last container, as the
    # real pagination would for a marker at the third entry.
    mock_container_list.return_value = container_list[-1:]
    mock_container_show.return_value = container_list[-1]
    response = self.app.get('/v1/containers?limit=3&marker=%s'
                            % container_list[2].uuid)
    self.assertEqual(200, response.status_int)
    actual_containers = response.json['containers']
    self.assertEqual(1, len(actual_containers))
    self.assertEqual(container_list[-1].uuid,
                     actual_containers[0].get('uuid'))
@patch('magnum.conductor.api.API.container_show')
@patch('magnum.objects.Container.list')
def test_detail_containers_with_pagination_marker(self,
                                                  mock_container_list,
                                                  mock_container_show):
    """/detail honors limit+marker and exposes the full field set."""
    container_list = []
    # Create four containers; the third one's uuid is used as the marker.
    for id_ in range(4):
        test_container = utils.create_test_container(
            id=id_, uuid=comm_utils.generate_uuid())
        container_list.append(objects.Container(self.context,
                                                **test_container))
    # Mock the paginated result: only the last container follows the marker.
    mock_container_list.return_value = container_list[-1:]
    mock_container_show.return_value = container_list[-1]
    response = self.app.get('/v1/containers/detail?limit=3&marker=%s'
                            % container_list[2].uuid)
    self.assertEqual(200, response.status_int)
    actual_containers = response.json['containers']
    self.assertEqual(1, len(actual_containers))
    self.assertEqual(container_list[-1].uuid,
                     actual_containers[0].get('uuid'))
    # The detail view must include the extended attribute set.
    self.assertIn('name', actual_containers[0])
    self.assertIn('bay_uuid', actual_containers[0])
    self.assertIn('status', actual_containers[0])
    self.assertIn('image', actual_containers[0])
    self.assertIn('command', actual_containers[0])
    self.assertIn('memory', actual_containers[0])
    self.assertIn('environment', actual_containers[0])
@patch('magnum.conductor.api.API.container_show')
@patch('magnum.objects.Container.list')
def test_get_all_containers_with_exception(self, mock_container_list,
                                           mock_container_show):
    """If the conductor show() call fails, listing still succeeds and the
    affected container is reported with UNKNOWN status."""
    test_container = utils.get_test_container()
    containers = [objects.Container(self.context, **test_container)]
    mock_container_list.return_value = containers
    # Any exception from the status lookup must be swallowed by the API.
    mock_container_show.side_effect = Exception
    response = self.app.get('/v1/containers')
    mock_container_list.assert_called_once_with(mock.ANY,
                                                1000, None, 'id', 'asc',
                                                filters=None)
    self.assertEqual(200, response.status_int)
    actual_containers = response.json['containers']
    self.assertEqual(1, len(actual_containers))
    self.assertEqual(test_container['uuid'],
                     actual_containers[0].get('uuid'))
    # Status falls back to UNKNOWN rather than propagating the error.
    self.assertEqual(fields.ContainerStatus.UNKNOWN,
                     actual_containers[0].get('status'))
@patch('magnum.conductor.api.API.container_show')
@patch('magnum.api.utils.get_resource')
@patch('magnum.objects.Container.list')
def test_get_all_containers_with_bay_ident(self, mock_container_list,
                                           mock_retrive_bay_uuid,
                                           mock_container_show):
    """?bay_ident resolves the bay and filters the listing on its UUID."""
    test_container = utils.get_test_container()
    containers = [objects.Container(self.context, **test_container)]
    mock_container_list.return_value = containers
    # get_resource() resolves the bay ident to a bay whose uuid is '12'.
    mock_retrive_bay_uuid.return_value.uuid = '12'
    mock_container_show.return_value = containers[0]
    response = self.app.get('/v1/containers/?bay_ident=12')
    # The resolved bay uuid must be passed down as a list() filter.
    mock_container_list.assert_called_once_with(mock.ANY,
                                                1000, None, 'id', 'asc',
                                                filters={'bay_uuid': '12'})
    self.assertEqual(200, response.status_int)
    actual_containers = response.json['containers']
    self.assertEqual(1, len(actual_containers))
    self.assertEqual(test_container['uuid'],
                     actual_containers[0].get('uuid'))
@patch('magnum.conductor.api.API.container_show')
@patch('magnum.objects.Container.get_by_uuid')
def test_get_one_by_uuid(self, mock_container_get_by_uuid,
                         mock_container_show):
    """GET /v1/containers/<uuid> looks the container up by UUID."""
    sample = utils.get_test_container()
    sample_obj = objects.Container(self.context, **sample)
    mock_container_get_by_uuid.return_value = sample_obj
    mock_container_show.return_value = sample_obj
    resp = self.app.get('/v1/containers/%s' % sample['uuid'])
    mock_container_get_by_uuid.assert_called_once_with(
        mock.ANY,
        sample['uuid'])
    self.assertEqual(200, resp.status_int)
    self.assertEqual(sample['uuid'], resp.json['uuid'])
@patch('magnum.conductor.api.API.container_show')
@patch('magnum.objects.Container.get_by_name')
def test_get_one_by_name(self, mock_container_get_by_name,
                         mock_container_show):
    """GET /v1/containers/<name> looks the container up by name."""
    sample = utils.get_test_container()
    sample_obj = objects.Container(self.context, **sample)
    mock_container_get_by_name.return_value = sample_obj
    mock_container_show.return_value = sample_obj
    resp = self.app.get('/v1/containers/%s' % sample['name'])
    mock_container_get_by_name.assert_called_once_with(
        mock.ANY,
        sample['name'])
    self.assertEqual(200, resp.status_int)
    self.assertEqual(sample['uuid'], resp.json['uuid'])
def _action_test(self, container, action, ident_field):
    """Drive PUT /v1/containers/<ident>/<action> and check that only the
    PUT verb is accepted on the action URL."""
    container_obj = objects.Container(self.context, **container)
    ident = container.get(ident_field)
    lookup_target = 'magnum.objects.Container.get_by_%s' % ident_field
    with patch(lookup_target) as mock_get_by_ident:
        mock_get_by_ident.return_value = container_obj
        url = '/v1/containers/%s/%s' % (ident, action)
        resp = self.app.put(url)
        self.assertEqual(200, resp.status_int)
        # Only PUT should work, others like GET should fail
        self.assertRaises(AppError, self.app.get, url)
@patch('magnum.conductor.api.API.container_start')
def test_start_by_uuid(self, mock_container_start):
    """Start addressed by UUID forwards the container UUID to the conductor."""
    mock_container_start.return_value = ""
    sample = utils.get_test_container()
    self._action_test(sample, 'start', 'uuid')
    mock_container_start.assert_called_once_with(sample.get('uuid'))
@patch('magnum.conductor.api.API.container_start')
def test_start_by_name(self, mock_container_start):
    """Start addressed by name still passes the UUID to the conductor."""
    mock_container_start.return_value = ""
    sample = utils.get_test_container()
    self._action_test(sample, 'start', 'name')
    mock_container_start.assert_called_once_with(sample.get('uuid'))
@patch('magnum.conductor.api.API.container_stop')
def test_stop_by_uuid(self, mock_container_stop):
    """Stop addressed by UUID forwards the container UUID to the conductor."""
    mock_container_stop.return_value = ""
    sample = utils.get_test_container()
    self._action_test(sample, 'stop', 'uuid')
    mock_container_stop.assert_called_once_with(sample.get('uuid'))
@patch('magnum.conductor.api.API.container_stop')
def test_stop_by_name(self, mock_container_stop):
    """Stop addressed by name still passes the UUID to the conductor."""
    mock_container_stop.return_value = ""
    sample = utils.get_test_container()
    self._action_test(sample, 'stop', 'name')
    mock_container_stop.assert_called_once_with(sample.get('uuid'))
@patch('magnum.conductor.api.API.container_pause')
def test_pause_by_uuid(self, mock_container_pause):
    """Pause addressed by UUID forwards the container UUID to the conductor."""
    mock_container_pause.return_value = ""
    sample = utils.get_test_container()
    self._action_test(sample, 'pause', 'uuid')
    mock_container_pause.assert_called_once_with(sample.get('uuid'))
@patch('magnum.conductor.api.API.container_pause')
def test_pause_by_name(self, mock_container_pause):
    """Pause addressed by name still passes the UUID to the conductor."""
    mock_container_pause.return_value = ""
    sample = utils.get_test_container()
    self._action_test(sample, 'pause', 'name')
    mock_container_pause.assert_called_once_with(sample.get('uuid'))
@patch('magnum.conductor.api.API.container_unpause')
def test_unpause_by_uuid(self, mock_container_unpause):
    """Unpause addressed by UUID forwards the UUID to the conductor."""
    mock_container_unpause.return_value = ""
    sample = utils.get_test_container()
    self._action_test(sample, 'unpause', 'uuid')
    mock_container_unpause.assert_called_once_with(sample.get('uuid'))
@patch('magnum.conductor.api.API.container_unpause')
def test_unpause_by_name(self, mock_container_unpause):
    """Unpause addressed by name still passes the UUID to the conductor."""
    mock_container_unpause.return_value = ""
    sample = utils.get_test_container()
    self._action_test(sample, 'unpause', 'name')
    mock_container_unpause.assert_called_once_with(sample.get('uuid'))
@patch('magnum.conductor.api.API.container_reboot')
def test_reboot_by_uuid(self, mock_container_reboot):
    """Reboot addressed by UUID forwards the UUID to the conductor."""
    mock_container_reboot.return_value = ""
    sample = utils.get_test_container()
    self._action_test(sample, 'reboot', 'uuid')
    mock_container_reboot.assert_called_once_with(sample.get('uuid'))
@patch('magnum.conductor.api.API.container_reboot')
def test_reboot_by_name(self, mock_container_reboot):
    """Reboot addressed by name still passes the UUID to the conductor."""
    mock_container_reboot.return_value = ""
    sample = utils.get_test_container()
    self._action_test(sample, 'reboot', 'name')
    mock_container_reboot.assert_called_once_with(sample.get('uuid'))
@patch('magnum.conductor.api.API.container_logs')
@patch('magnum.objects.Container.get_by_uuid')
def test_get_logs_by_uuid(self, mock_get_by_uuid, mock_container_logs):
    """GET .../logs by UUID passes the container UUID to the conductor."""
    mock_container_logs.return_value = ""
    sample = utils.get_test_container()
    mock_get_by_uuid.return_value = objects.Container(self.context,
                                                      **sample)
    uuid = sample.get('uuid')
    resp = self.app.get('/v1/containers/%s/logs' % uuid)
    self.assertEqual(200, resp.status_int)
    mock_container_logs.assert_called_once_with(uuid)
@patch('magnum.conductor.api.API.container_logs')
@patch('magnum.objects.Container.get_by_name')
def test_get_logs_by_name(self, mock_get_by_name, mock_container_logs):
    """GET .../logs by name still passes the UUID to the conductor."""
    mock_container_logs.return_value = ""
    sample = utils.get_test_container()
    mock_get_by_name.return_value = objects.Container(self.context,
                                                      **sample)
    name = sample.get('name')
    uuid = sample.get('uuid')
    resp = self.app.get('/v1/containers/%s/logs' % name)
    self.assertEqual(200, resp.status_int)
    mock_container_logs.assert_called_once_with(uuid)
@patch('magnum.conductor.api.API.container_logs')
@patch('magnum.objects.Container.get_by_uuid')
def test_get_logs_put_fails(self, mock_get_by_uuid, mock_container_logs):
    """The logs endpoint rejects PUT and never reaches the conductor."""
    sample = utils.get_test_container()
    mock_get_by_uuid.return_value = objects.Container(self.context,
                                                      **sample)
    uuid = sample.get('uuid')
    self.assertRaises(AppError, self.app.put,
                      '/v1/containers/%s/logs' % uuid)
    self.assertFalse(mock_container_logs.called)
@patch('magnum.conductor.api.API.container_exec')
@patch('magnum.objects.Container.get_by_uuid')
def test_execute_command_by_uuid(self, mock_get_by_uuid,
                                 mock_container_exec):
    """PUT .../execute by UUID runs the command through the conductor."""
    mock_container_exec.return_value = ""
    sample = utils.get_test_container()
    mock_get_by_uuid.return_value = objects.Container(self.context,
                                                      **sample)
    uuid = sample.get('uuid')
    url = '/v1/containers/%s/%s' % (uuid, 'execute')
    cmd = {'command': 'ls'}
    resp = self.app.put(url, cmd)
    self.assertEqual(200, resp.status_int)
    mock_container_exec.assert_called_once_with(uuid, cmd['command'])
@patch('magnum.conductor.api.API.container_exec')
@patch('magnum.objects.Container.get_by_name')
def test_execute_command_by_name(self, mock_get_by_name,
                                 mock_container_exec):
    """PUT .../execute by name still forwards the UUID and command."""
    mock_container_exec.return_value = ""
    sample = utils.get_test_container()
    mock_get_by_name.return_value = objects.Container(self.context,
                                                      **sample)
    name = sample.get('name')
    uuid = sample.get('uuid')
    url = '/v1/containers/%s/%s' % (name, 'execute')
    cmd = {'command': 'ls'}
    resp = self.app.put(url, cmd)
    self.assertEqual(200, resp.status_int)
    mock_container_exec.assert_called_once_with(uuid, cmd['command'])
@patch('magnum.conductor.api.API.container_delete')
@patch('magnum.objects.Container.get_by_uuid')
def test_delete_container_by_uuid(self, mock_get_by_uuid,
                                  mock_container_delete):
    """DELETE by UUID calls the conductor and destroys the DB record."""
    sample = utils.get_test_container()
    sample_obj = objects.Container(self.context, **sample)
    mock_get_by_uuid.return_value = sample_obj
    with patch.object(sample_obj, 'destroy') as mock_destroy:
        uuid = sample.get('uuid')
        resp = self.app.delete('/v1/containers/%s' % uuid)
        self.assertEqual(204, resp.status_int)
        mock_container_delete.assert_called_once_with(uuid)
        mock_destroy.assert_called_once_with()
@patch('magnum.conductor.api.API.container_delete')
@patch('magnum.objects.Container.get_by_name')
def test_delete_container_by_name(self, mock_get_by_name,
                                  mock_container_delete):
    """DELETE by name resolves the UUID, calls the conductor, destroys."""
    sample = utils.get_test_container()
    sample_obj = objects.Container(self.context, **sample)
    mock_get_by_name.return_value = sample_obj
    with patch.object(sample_obj, 'destroy') as mock_destroy:
        name = sample.get('name')
        uuid = sample.get('uuid')
        resp = self.app.delete('/v1/containers/%s' % name)
        self.assertEqual(204, resp.status_int)
        mock_container_delete.assert_called_once_with(uuid)
        mock_destroy.assert_called_once_with()
class TestContainerEnforcement(api_base.FunctionalTest):
    """RBAC policy-enforcement checks for the container API endpoints."""

    def _common_policy_check(self, rule, func, *arg, **kwarg):
        # Make the rule fail for the request's project, then verify the
        # API answers 403 with the standard policy error message.
        self.policy.set_rules({rule: 'project_id:non_fake'})
        response = func(*arg, **kwarg)
        self.assertEqual(403, response.status_int)
        self.assertEqual('application/json', response.content_type)
        # Bug fix: assertTrue(expected_msg, detail) treated the (always
        # truthy) message string as the assertion subject, so the detail
        # text was never actually checked. assertIn does the real check.
        self.assertIn(
            "Policy doesn't allow %s to be performed." % rule,
            response.json['errors'][0]['detail'])

    def test_policy_disallow_get_all(self):
        self._common_policy_check(
            'container:get_all', self.get_json, '/containers',
            expect_errors=True)

    def test_policy_disallow_get_one(self):
        container = obj_utils.create_test_container(self.context)
        self._common_policy_check(
            'container:get', self.get_json,
            '/containers/%s' % container.uuid,
            expect_errors=True)

    def test_policy_disallow_detail(self):
        self._common_policy_check(
            'container:detail',
            self.get_json,
            '/containers/%s/detail' % comm_utils.generate_uuid(),
            expect_errors=True)

    def test_policy_disallow_create(self):
        baymodel = obj_utils.create_test_baymodel(self.context)
        bay = obj_utils.create_test_bay(self.context,
                                        baymodel_id=baymodel.uuid)
        params = ('{"name": "My Docker", "image": "ubuntu",'
                  '"command": "env", "memory": "512m",'
                  '"bay_uuid": "%s"}' % bay.uuid)
        self._common_policy_check(
            'container:create', self.app.post, '/v1/containers',
            params=params,
            content_type='application/json',
            expect_errors=True)

    def test_policy_disallow_delete(self):
        bay = obj_utils.create_test_bay(self.context)
        container = obj_utils.create_test_container(self.context,
                                                    bay_uuid=bay.uuid)
        self._common_policy_check(
            'container:delete', self.app.delete,
            '/v1/containers/%s' % container.uuid,
            expect_errors=True)

    def _owner_check(self, rule, func, *args, **kwargs):
        # Restrict the rule to the resource owner; a request made by a
        # different user must be refused with 403.
        self.policy.set_rules({rule: "user_id:%(user_id)s"})
        response = func(*args, **kwargs)
        self.assertEqual(403, response.status_int)
        self.assertEqual('application/json', response.content_type)
        # Bug fix: same assertTrue misuse as in _common_policy_check.
        self.assertIn(
            "Policy doesn't allow %s to be performed." % rule,
            response.json['errors'][0]['detail'])

    def test_policy_only_owner_get_one(self):
        container = obj_utils.create_test_container(self.context,
                                                    user_id='another')
        self._owner_check("container:get", self.get_json,
                          '/containers/%s' % container.uuid,
                          expect_errors=True)

    def test_policy_only_owner_delete(self):
        container = obj_utils.create_test_container(self.context,
                                                    user_id='another')
        self._owner_check(
            "container:delete", self.delete,
            '/containers/%s' % container.uuid,
            expect_errors=True)

    def test_policy_only_owner_logs(self):
        container = obj_utils.create_test_container(self.context,
                                                    user_id='another')
        self._owner_check("container:logs", self.get_json,
                          '/containers/logs/%s' % container.uuid,
                          expect_errors=True)

    def test_policy_only_owner_execute(self):
        container = obj_utils.create_test_container(self.context,
                                                    user_id='another')
        self._owner_check("container:execute", self.put_json,
                          '/containers/execute/%s/ls' % container.uuid,
                          {}, expect_errors=True)

    def test_policy_only_owner_actions(self):
        actions = ['start', 'stop', 'reboot', 'pause', 'unpause']
        container = obj_utils.create_test_container(self.context,
                                                    user_id='another')
        for action in actions:
            self._owner_check('container:%s' % action, self.put_json,
                              '/containers/%s/%s' % (action, container.uuid),
                              {}, expect_errors=True)
| 48.039216
| 79
| 0.619009
| 3,813
| 34,300
| 5.273013
| 0.063992
| 0.085348
| 0.0378
| 0.04347
| 0.87357
| 0.840147
| 0.827315
| 0.815229
| 0.793097
| 0.780812
| 0
| 0.015176
| 0.268076
| 34,300
| 713
| 80
| 48.106592
| 0.7857
| 0.026093
| 0
| 0.715947
| 0
| 0
| 0.170641
| 0.078041
| 0
| 0
| 0
| 0
| 0.196013
| 1
| 0.084718
| false
| 0
| 0.01495
| 0
| 0.106312
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c92fe10767ef8acfb969c1fbe58e1b35c3012232
| 37
|
py
|
Python
|
xscale/spectral/__init__.py
|
xy6g13/xscale
|
a0c5809b6005a2016ab85849fa33e24c3fc19518
|
[
"Apache-2.0"
] | 24
|
2017-02-28T15:01:29.000Z
|
2022-02-22T08:26:23.000Z
|
xscale/spectral/__init__.py
|
xy6g13/xscale
|
a0c5809b6005a2016ab85849fa33e24c3fc19518
|
[
"Apache-2.0"
] | 19
|
2017-02-24T12:30:26.000Z
|
2022-02-25T04:57:32.000Z
|
xscale/spectral/__init__.py
|
serazing/xscale
|
a804866aa6f6a5a0f293a7f6765ea17403159134
|
[
"Apache-2.0"
] | 10
|
2017-03-04T02:59:42.000Z
|
2021-11-14T12:40:54.000Z
|
from . import fft
from . import tools
| 18.5
| 19
| 0.756757
| 6
| 37
| 4.666667
| 0.666667
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.189189
| 37
| 2
| 19
| 18.5
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c939bdd7172d4d5b1d8da70e02e1b8fed7150cdb
| 1,213
|
py
|
Python
|
thoughtfulsoup/checker_map.py
|
dchan3/beautifulsoup4
|
1d32f05415fdea13e70fea734f83eb4ab8a23fdc
|
[
"MIT"
] | 2
|
2018-02-24T19:43:59.000Z
|
2018-02-25T09:05:04.000Z
|
thoughtfulsoup/checker_map.py
|
dchan3/thoughtfulsoup
|
1d32f05415fdea13e70fea734f83eb4ab8a23fdc
|
[
"MIT"
] | null | null | null |
thoughtfulsoup/checker_map.py
|
dchan3/thoughtfulsoup
|
1d32f05415fdea13e70fea734f83eb4ab8a23fdc
|
[
"MIT"
] | null | null | null |
from thoughtfulsoup.counter import Counter
# Maps a CSS selector prefix to a predicate over (tag, token): '#' checks
# the tag's id against the text after '#'; '.' checks that every dotted
# class name in the token is present in the tag's class list.
ID_CLASS_CHECKER = {
    '#': lambda tag, tok: tag.get('id', None) == tok.split('#', 1)[1],
    '.': lambda tag, tok: set(tok.split('.', 1)[1].split('.')).issubset(tag.get('class', []))
}
# Factory: given the pseudo-selector argument (e.g. the n of :nth-child(n)),
# builds a dispatch table from pseudo-class name to a matcher dict whose
# "f" entry delegates to Counter(value, from_end).nth_child_of_type(...);
# the final boolean argument selects of-type matching.
PSEUDO_TYPE_CHECKER = lambda pseudo_value: {
    'nth-child': {
        "f": lambda tag, tags, tags_f: Counter(pseudo_value, False).nth_child_of_type(tag, tags, tags_f, False)
    },
    'nth-of-type': {
        "f": lambda tag, tags, tags_f: Counter(pseudo_value, False).nth_child_of_type(tag, tags, tags_f, True)
    },
    'first-child': {
        "f": lambda tag, tags, tags_f: Counter(1, False).nth_child_of_type(tag, tags, tags_f, False)
    },
    'first-of-type': {
        "f": lambda tag, tags, tags_f: Counter(1, False).nth_child_of_type(tag, tags, tags_f, True)
    },
    'nth-last-of-type': {
        "f": lambda tag, tags, tags_f: Counter(pseudo_value, True).nth_child_of_type(tag, tags, tags_f, True)
    },
    'last-child': {
        "f": lambda tag, tags, tags_f: Counter(1, True).nth_child_of_type(tag, tags, tags_f, False)
    },
    'last-of-type': {
        "f": lambda tag, tags, tags_f: Counter(1, True).nth_child_of_type(tag, tags, tags_f, True)
    }
}
| 39.129032
| 111
| 0.615004
| 186
| 1,213
| 3.77957
| 0.150538
| 0.139403
| 0.219061
| 0.238976
| 0.726885
| 0.726885
| 0.726885
| 0.726885
| 0.719772
| 0.698435
| 0
| 0.008247
| 0.20033
| 1,213
| 30
| 112
| 40.433333
| 0.716495
| 0
| 0
| 0
| 0
| 0
| 0.083265
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.035714
| 0
| 0.035714
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c956cca9392fe525c904a16fd4f8250db5735781
| 1,723
|
py
|
Python
|
utest/x3270/test_write.py
|
MichaelSeeburger/Robot-Framework-Mainframe-3270-Library
|
76b589d58c55a39f96c027a8ae28c41fa37ed445
|
[
"MIT"
] | 3
|
2018-10-02T14:32:06.000Z
|
2018-10-02T14:33:32.000Z
|
utest/x3270/test_write.py
|
MichaelSeeburger/Robot-Framework-Mainframe-3270-Library
|
76b589d58c55a39f96c027a8ae28c41fa37ed445
|
[
"MIT"
] | null | null | null |
utest/x3270/test_write.py
|
MichaelSeeburger/Robot-Framework-Mainframe-3270-Library
|
76b589d58c55a39f96c027a8ae28c41fa37ed445
|
[
"MIT"
] | null | null | null |
from pytest_mock import MockerFixture
from Mainframe3270.py3270 import Emulator
from Mainframe3270.x3270 import x3270
def test_write(mocker: MockerFixture, under_test: x3270):
    """write() transmits the string and then presses Enter."""
    mocker.patch("Mainframe3270.py3270.Emulator.exec_command")
    mocker.patch("Mainframe3270.py3270.Emulator.send_enter")
    under_test.write("abc")
    expected_payload = b'String("abc")'
    Emulator.exec_command.assert_called_once_with(expected_payload)
    Emulator.send_enter.assert_called_once()
def test_write_bare(mocker: MockerFixture, under_test: x3270):
    """write_bare() transmits the string without pressing Enter."""
    mocker.patch("Mainframe3270.py3270.Emulator.exec_command")
    mocker.patch("Mainframe3270.py3270.Emulator.send_enter")
    under_test.write_bare("abc")
    expected_payload = b'String("abc")'
    Emulator.exec_command.assert_called_once_with(expected_payload)
    Emulator.send_enter.assert_not_called()
def test_write_in_position(mocker: MockerFixture, under_test: x3270):
    """write_in_position() moves the cursor, writes, and presses Enter."""
    mocker.patch("Mainframe3270.py3270.Emulator.exec_command")
    mocker.patch("Mainframe3270.py3270.Emulator.move_to")
    mocker.patch("Mainframe3270.py3270.Emulator.send_enter")
    under_test.write_in_position("abc", 5, 5)
    Emulator.move_to.assert_called_once_with(5, 5)
    expected_payload = b'String("abc")'
    Emulator.exec_command.assert_called_once_with(expected_payload)
    Emulator.send_enter.assert_called_once()
def test_write_bare_in_position(mocker: MockerFixture, under_test: x3270):
    """write_bare_in_position() moves and writes but never presses Enter."""
    mocker.patch("Mainframe3270.py3270.Emulator.exec_command")
    mocker.patch("Mainframe3270.py3270.Emulator.move_to")
    mocker.patch("Mainframe3270.py3270.Emulator.send_enter")
    under_test.write_bare_in_position("abc", 5, 5)
    Emulator.move_to.assert_called_once_with(5, 5)
    expected_payload = b'String("abc")'
    Emulator.exec_command.assert_called_once_with(expected_payload)
    Emulator.send_enter.assert_not_called()
| 35.163265
| 74
| 0.784678
| 231
| 1,723
| 5.545455
| 0.142857
| 0.163154
| 0.187354
| 0.234192
| 0.900859
| 0.900859
| 0.900859
| 0.900859
| 0.900859
| 0.900859
| 0
| 0.080103
| 0.101567
| 1,723
| 48
| 75
| 35.895833
| 0.747416
| 0
| 0
| 0.645161
| 0
| 0
| 0.270459
| 0.233314
| 0
| 0
| 0
| 0
| 0.322581
| 1
| 0.129032
| false
| 0
| 0.096774
| 0
| 0.225806
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a32f92272077fe43825f6ffaf97a17ce88eea129
| 5,762
|
py
|
Python
|
Hermes/appscale/hermes/producers/tests/test_cassandra.py
|
Honcharov12/appscale
|
be1cf90fcd24f1a5a88848f7eb73331b6e4e66d9
|
[
"Apache-2.0"
] | null | null | null |
Hermes/appscale/hermes/producers/tests/test_cassandra.py
|
Honcharov12/appscale
|
be1cf90fcd24f1a5a88848f7eb73331b6e4e66d9
|
[
"Apache-2.0"
] | 1
|
2019-10-15T15:57:53.000Z
|
2019-10-15T15:57:53.000Z
|
Hermes/appscale/hermes/producers/tests/test_cassandra.py
|
Honcharov12/appscale
|
be1cf90fcd24f1a5a88848f7eb73331b6e4e66d9
|
[
"Apache-2.0"
] | 1
|
2019-08-27T05:19:48.000Z
|
2019-08-27T05:19:48.000Z
|
from mock import MagicMock, patch
from tornado import gen, testing
from appscale.hermes.producers import cassandra_stats
def future(value):
    """Wrap *value* in an already-resolved tornado Future."""
    resolved = gen.Future()
    resolved.set_result(value)
    return resolved
# Canned `nodetool status` output for a two-node ring; the column layout
# here (Tokens present, then Owns) is what the parser under test consumes.
# NOTE(review): column spacing is copied as displayed — confirm against the
# original fixture if the parser is whitespace-sensitive.
MULTINODE_STATUS = """Datacenter: datacenter1
=======================
Status=Up/Down
|/ State=Normal/Leaving/Joining/Moving
-- Address Load Tokens Owns (effective) Host ID Rack
UN 10.0.2.15 67.94 GiB 1 99.8% a341df86-71e2-4054-83d6-c2d92dc75afc rack1
UN 10.0.2.16 65.99 GiB 1 0.2% 2ceb81a6-4c49-456d-a38b-23667ee60ff9 rack1
"""
# Canned `nodetool status` output for a single node; note the different
# column order (Owns before Host ID, Token after it) versus MULTINODE_STATUS.
SINGLENODE_STATUS = """Datacenter: datacenter1
=======================
Status=Up/Down
|/ State=Normal/Leaving/Joining/Moving
-- Address Load Owns (effective) Host ID Token Rack
UN 10.0.2.15 337.07 MiB 100.0% 38fd1ac1-85f9-4b19-8f8f-19ef5a00d65d bf5f65abbfab7ac2dd87145d0cde8435 rack1
"""
class TestCurrentCassandraStats(testing.AsyncTestCase):
@patch.object(cassandra_stats.process, 'Subprocess')
@patch.object(cassandra_stats.appscale_info, 'get_db_ips')
@testing.gen_test
def test_multinode(self, mock_get_db_ips, mock_subprocess):
    """Two-node `nodetool status` output is parsed into two node stats."""
    subprocess = MagicMock()
    # Mocking `get_db_ips` and Subprocess
    mock_get_db_ips.return_value = ['10.0.2.15', '10.0.2.16']
    mock_subprocess.return_value = subprocess
    subprocess.stdout.read_until_close.return_value = future(MULTINODE_STATUS)
    subprocess.stderr.read_until_close.return_value = future('')
    # Calling method under test
    stats = yield cassandra_stats.CassandraStatsSource.get_current()
    # Asserting expectations
    self.assertEqual(stats.missing_nodes, [])
    self.assertEqual(stats.unknown_nodes, [])
    self.assertIsInstance(stats.utc_timestamp, int)
    self.assertEqual(len(stats.nodes), 2)
    first = stats.nodes[0]
    self.assertEqual(first.address, '10.0.2.15')
    self.assertEqual(first.status, 'Up')
    self.assertEqual(first.state, 'Normal')
    # Load is converted from "67.94 GiB" to bytes.
    self.assertEqual(first.load, int(67.94 * 1024**3))
    self.assertEqual(first.owns_pct, 99.8)
    self.assertEqual(first.tokens_num, 1)
    self.assertEqual(first.host_id, 'a341df86-71e2-4054-83d6-c2d92dc75afc')
    self.assertEqual(first.rack, 'rack1')
    second = stats.nodes[1]
    self.assertEqual(second.address, '10.0.2.16')
    self.assertEqual(second.status, 'Up')
    self.assertEqual(second.state, 'Normal')
    self.assertEqual(second.load, int(65.99 * 1024**3))
    self.assertEqual(second.owns_pct, 0.2)
    self.assertEqual(second.tokens_num, 1)
    self.assertEqual(second.host_id, '2ceb81a6-4c49-456d-a38b-23667ee60ff9')
    self.assertEqual(second.rack, 'rack1')
@patch.object(cassandra_stats.process, 'Subprocess')
@patch.object(cassandra_stats.appscale_info, 'get_db_ips')
@testing.gen_test
def test_singlenode(self, mock_get_db_ips, mock_subprocess):
    """Single-node `nodetool status` output is parsed into one node stat."""
    subprocess = MagicMock()
    # Mocking `get_db_ips` and Subprocess
    mock_get_db_ips.return_value = ['10.0.2.15']
    mock_subprocess.return_value = subprocess
    subprocess.stdout.read_until_close.return_value = future(SINGLENODE_STATUS)
    subprocess.stderr.read_until_close.return_value = future('')
    # Calling method under test
    stats = yield cassandra_stats.CassandraStatsSource.get_current()
    # Asserting expectations
    self.assertEqual(stats.missing_nodes, [])
    self.assertEqual(stats.unknown_nodes, [])
    self.assertIsInstance(stats.utc_timestamp, int)
    self.assertEqual(len(stats.nodes), 1)
    first = stats.nodes[0]
    self.assertEqual(first.address, '10.0.2.15')
    self.assertEqual(first.status, 'Up')
    self.assertEqual(first.state, 'Normal')
    # Load is converted from "337.07 MiB" to bytes.
    self.assertEqual(first.load, int(337.07 * 1024**2))
    self.assertEqual(first.owns_pct, 100.0)
    self.assertEqual(first.tokens_num, 1)
    self.assertEqual(first.host_id, '38fd1ac1-85f9-4b19-8f8f-19ef5a00d65d')
    self.assertEqual(first.rack, 'rack1')
@patch.object(cassandra_stats.process, 'Subprocess')
@patch.object(cassandra_stats.appscale_info, 'get_db_ips')
@testing.gen_test
def test_missing_and_unknown(self, mock_get_db_ips, mock_subprocess):
subprocess = MagicMock()
# Mocking `get_db_ips` and Subprocess
mock_get_db_ips.return_value = ['10.0.2.15', '10.0.2.missing']
mock_subprocess.return_value = subprocess
subprocess.stdout.read_until_close.return_value = future(MULTINODE_STATUS)
subprocess.stderr.read_until_close.return_value = future('')
# Calling method under test
stats = yield cassandra_stats.CassandraStatsSource.get_current()
# Asserting expectations
self.assertEqual(stats.missing_nodes, ['10.0.2.missing'])
self.assertEqual(stats.unknown_nodes, ['10.0.2.16'])
self.assertIsInstance(stats.utc_timestamp, int)
self.assertEqual(len(stats.nodes), 2)
first = stats.nodes[0]
self.assertEqual(first.address, '10.0.2.15')
self.assertEqual(first.status, 'Up')
self.assertEqual(first.state, 'Normal')
self.assertEqual(first.load, int(67.94 * 1024**3))
self.assertEqual(first.owns_pct, 99.8)
self.assertEqual(first.tokens_num, 1)
self.assertEqual(first.host_id, 'a341df86-71e2-4054-83d6-c2d92dc75afc')
self.assertEqual(first.rack, 'rack1')
second = stats.nodes[1]
self.assertEqual(second.address, '10.0.2.16')
self.assertEqual(second.status, 'Up')
self.assertEqual(second.state, 'Normal')
self.assertEqual(second.load, int(65.99 * 1024**3))
self.assertEqual(second.owns_pct, 0.2)
self.assertEqual(second.tokens_num, 1)
self.assertEqual(second.host_id, '2ceb81a6-4c49-456d-a38b-23667ee60ff9')
self.assertEqual(second.rack, 'rack1')
| 39.197279
| 129
| 0.706873
| 749
| 5,762
| 5.285714
| 0.154873
| 0.185653
| 0.121243
| 0.012124
| 0.886335
| 0.832533
| 0.826471
| 0.826471
| 0.826471
| 0.826471
| 0
| 0.075776
| 0.161749
| 5,762
| 146
| 130
| 39.465753
| 0.743892
| 0.044082
| 0
| 0.745455
| 0
| 0.027273
| 0.21626
| 0.079302
| 0
| 0
| 0
| 0
| 0.472727
| 1
| 0.036364
| false
| 0
| 0.027273
| 0
| 0.081818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a342a4ec91cd21725896d3f6211cec181a5e6f58
| 1,451
|
py
|
Python
|
model/parts/v2_hydra_coeffs.py
|
inventandchill/HydraDX-simulations
|
cfc380363c7aa9abc2ee8aae670a4a6d704d36ca
|
[
"Apache-2.0"
] | 4
|
2021-08-12T21:33:26.000Z
|
2022-03-04T22:51:33.000Z
|
model/parts/v2_hydra_coeffs.py
|
inventandchill/HydraDX-simulations
|
cfc380363c7aa9abc2ee8aae670a4a6d704d36ca
|
[
"Apache-2.0"
] | 31
|
2021-10-31T20:18:57.000Z
|
2022-03-25T16:01:41.000Z
|
model/parts/v2_hydra_coeffs.py
|
inventandchill/HydraDX-simulations
|
cfc380363c7aa9abc2ee8aae670a4a6d704d36ca
|
[
"Apache-2.0"
] | 4
|
2021-08-13T06:59:59.000Z
|
2021-12-13T17:47:57.000Z
|
import numpy as np
def addLiquidity_C(params, substep, state_history, prev_state, policy_input):
    """
    Update and return the coefficient C after a liquidity add, per the
    6-28-21 specification:

        C = C + (R^+ / R) ** (a + 1)

    Returns the ('Ci', <value>) tuple expected by the cadCAD state-update API.
    A zero deposit leaves the coefficient unchanged.
    """
    asset_id = policy_input['asset_id']  # asset subscript
    deposit = policy_input['ri_deposit']
    pool = prev_state['pool']
    reserve = pool.get_reserve(asset_id)
    coefficient = pool.get_coefficient(asset_id)
    amplification = params['a']

    if deposit == 0:
        # No-op deposit: coefficient is unchanged.
        return ('Ci', coefficient)

    new_reserve = reserve + deposit
    return ('Ci', coefficient + (new_reserve / reserve) ** (amplification + 1))
def removeLiquidity_C(params, substep, state_history, prev_state, policy_input):
    """
    Update and return the coefficient C after a liquidity remove, per the
    6-28-21 specification:

        C = C + (R^+ / R) ** (a + 1)

    The burned HYDRA shares are first converted into the equivalent asset
    amount via delta_Ri = (delta_S / Sq) * (Q / P).  Returns the
    ('Ci', <value>) tuple expected by the cadCAD state-update API; a zero
    withdrawal leaves the coefficient unchanged.
    """
    asset_id = policy_input['asset_id']  # asset subscript
    pool = prev_state['pool']
    burned_shares = policy_input['HYDRA_burn']
    reserve = pool.get_reserve(asset_id)
    coefficient = pool.get_coefficient(asset_id)
    amplification = params['a']
    price = pool.get_price(asset_id)

    # Convert burned share tokens into the equivalent asset-reserve amount.
    withdrawn = (burned_shares / prev_state['Sq']) * (prev_state['Q'] / price)

    if withdrawn == 0:
        # Nothing removed: coefficient is unchanged.
        return ('Ci', coefficient)

    new_reserve = reserve + withdrawn
    return ('Ci', coefficient + (new_reserve / reserve) ** (amplification + 1))
| 32.244444
| 116
| 0.608546
| 213
| 1,451
| 3.920188
| 0.2723
| 0.075449
| 0.047904
| 0.045509
| 0.807186
| 0.807186
| 0.807186
| 0.807186
| 0.807186
| 0.807186
| 0
| 0.014939
| 0.261888
| 1,451
| 44
| 117
| 32.977273
| 0.764706
| 0.223983
| 0
| 0.709677
| 0
| 0
| 0.052486
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.064516
| false
| 0
| 0.032258
| 0
| 0.225806
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a3a4980259d263dc58914b27fc5999c152e40e72
| 6,633
|
py
|
Python
|
redashAPI/test_client.py
|
strangeman/redash-api-client
|
a3ef598e20b89e80fcef6948636183a8b1617832
|
[
"MIT"
] | 1
|
2021-09-30T12:27:12.000Z
|
2021-09-30T12:27:12.000Z
|
redashAPI/test_client.py
|
strangeman/redash-api-client
|
a3ef598e20b89e80fcef6948636183a8b1617832
|
[
"MIT"
] | null | null | null |
redashAPI/test_client.py
|
strangeman/redash-api-client
|
a3ef598e20b89e80fcef6948636183a8b1617832
|
[
"MIT"
] | null | null | null |
from .client import RedashAPIClient, AlreadyExistsException, EntityNotFoundException
import pytest
REDASH_HOST = "http://redash:5000"
REDASH_API_KEY = "put-your-key-here"

# Shared client used by every test in this module (created at import time).
redash = RedashAPIClient(REDASH_API_KEY, REDASH_HOST)

# Remove leftovers from a previous run so the create-tests start clean.
redash.delete_data_source("_datasource-test")
redash.delete_data_source("_datasource-test2")
redash.delete_group("_group-test")
redash.delete_user("_user1-test")
@pytest.fixture(scope='module')
def global_data():
    """Mutable module-wide store for the IDs created during the test run."""
    return dict(ds_id=0, gr_id=0, user_id=0)
def test_create_data_source(global_data):
    """Create a Postgres data source and remember its ID for later tests."""
    options = {
        "dbname": "test_ds",
        "host": "test_host",
        "password": "test_pwd",
        "port": 35432,
        "user": "test_user",
    }
    payload = redash.create_data_source(
        "pg", "_datasource-test", options=options).json()
    assert payload["type"] == "pg"
    assert payload["options"]["dbname"] == "test_ds"
    assert payload["options"]["port"] == 35432
    # Redash masks stored passwords in API responses.
    assert payload["options"]["password"] == "--------"
    global_data['ds_id'] = payload["id"]
def test_create_data_source_via_create_or_update(global_data):
    """create_or_update must behave like a plain create for a new name."""
    options = {
        "dbname": "test_ds2",
        "host": "test_host",
        "password": "test_pwd",
        "port": 35432,
        "user": "test_user",
    }
    payload = redash.create_or_update_datasource(
        "pg", "_datasource-test2", options=options).json()
    assert payload["type"] == "pg"
    assert payload["options"]["dbname"] == "test_ds2"
    assert payload["options"]["port"] == 35432
    # Redash masks stored passwords in API responses.
    assert payload["options"]["password"] == "--------"
def test_create_duplicate_data_source():
    """Re-creating an existing data source must raise AlreadyExistsException.

    Fix: drop the unused ``res =`` binding (flake8 F841) — the call inside
    ``pytest.raises`` is expected to raise, so its result is never reachable.
    """
    with pytest.raises(AlreadyExistsException):
        # Same name as in test_create_data_source; the call itself must raise.
        redash.create_data_source("pg", "_datasource-test", options={
            "dbname": "test_ds",
            "host": "test_host",
            "password": "test_pwd",
            "port": 35432,
            "user": "test_user"}).json()
def test_get_data_source_by_name(global_data):
    """Look up the previously created data source by name."""
    found = redash.get_data_source_by_name("_datasource-test")
    assert found["type"] == "pg"
    assert found["id"] == global_data['ds_id']
def test_create_or_update_datasource(global_data):
    """create_or_update on an existing name must update in place, keeping the ID."""
    options = {
        "dbname": "test_ds_2",
        "host": "test_host",
        "password": "test_pwd",
        "port": 35432,
        "user": "test_user",
    }
    payload = redash.create_or_update_datasource(
        "pg", "_datasource-test", options=options).json()
    assert payload["type"] == "pg"
    assert payload["options"]["dbname"] == "test_ds_2"
    assert payload["options"]["port"] == 35432
    # Redash masks stored passwords in API responses.
    assert payload["options"]["password"] == "--------"
    assert payload["id"] == global_data['ds_id']
def test_get_data_source_by_name_after_update(global_data):
    """The data source keeps its name and ID after the in-place update."""
    found = redash.get_data_source_by_name("_datasource-test")
    assert found["type"] == "pg"
    assert found["id"] == global_data['ds_id']
def test_create_group(global_data):
    """Create a regular group and remember its ID for later tests."""
    created = redash.create_group("_group-test").json()
    assert created["name"] == "_group-test"
    assert created["type"] == "regular"
    global_data['gr_id'] = created["id"]
def test_create_user(global_data):
    """Create a user; creating the same user again must raise."""
    created = redash.create_user("_user1-test", "test1@example.com").json()
    assert created["name"] == "_user1-test"
    assert created["auth_type"] == "external"
    with pytest.raises(Exception):
        redash.create_user("_user1-test", "test1@example.com").json()
    global_data['user_id'] = created["id"]
def test_add_user_to_group(global_data):
    """Add a user to a group; unknown names raise, re-adding is a no-op."""
    added = redash.add_user_to_group("_user1-test", "_group-test").json()
    assert added["id"] == global_data["user_id"]
    assert global_data["gr_id"] in added["groups"]
    # Each combination with an unknown entity (user, group, both) must raise.
    for user, group in (
            ("THAT-USER-DOESNT-EXIST", "_group-test"),
            ("_user1-test", "THAT-GROUP-DOESNT-EXIST"),
            ("THAT-USER-DOESNT-EXIST", "THAT-GROUP-DOESNT-EXIST")):
        with pytest.raises(EntityNotFoundException):
            redash.add_user_to_group(user, group)
    # Re-adding an existing member is reported as unchanged.
    repeated = redash.add_user_to_group("_user1-test", "_group-test").json()
    assert repeated['msg'] == "Not changed"
    members = redash.get_group_users_by_id(global_data["gr_id"])
    assert any(member['id'] == global_data["user_id"] for member in members)
def test_delete_user_from_group(global_data):
    """Removing a member returns 200; repeating the removal returns 404."""
    first = redash.delete_user_from_group("_user1-test", "_group-test")
    assert first.status_code == 200
    members = redash.get_group_users_by_id(global_data["gr_id"])
    assert all(member['id'] != global_data["user_id"] for member in members)
    second = redash.delete_user_from_group("_user1-test", "_group-test")
    assert second.status_code == 404
def test_add_data_source_to_group(global_data):
    """Attach a data source to a group; unknown names raise, re-adding is a no-op."""
    attached = redash.add_data_source_to_group(
        "_datasource-test", "_group-test").json()
    assert attached["id"] == global_data["ds_id"]
    # Each combination with an unknown entity (source, group, both) must raise.
    for source, group in (
            ("THAT-DS-DOESNT-EXIST", "_group-test"),
            ("_datasource-test", "THAT-GROUP-DOESNT-EXIST"),
            ("THAT-DS-DOESNT-EXIST", "THAT-GROUP-DOESNT-EXIST")):
        with pytest.raises(EntityNotFoundException):
            redash.add_data_source_to_group(source, group)
    # Re-attaching an existing data source is reported as unchanged.
    repeated = redash.add_data_source_to_group(
        "_datasource-test", "_group-test").json()
    assert repeated['msg'] == "Not changed"
    sources = redash.get_group_data_sources_by_id(global_data["gr_id"])
    assert any(source['id'] == global_data["ds_id"] for source in sources)
def test_delete_data_source_from_group(global_data):
    """Detaching a data source returns 200; repeating the detach returns 404."""
    first = redash.delete_data_source_from_group("_datasource-test", "_group-test")
    assert first.status_code == 200
    sources = redash.get_group_data_sources_by_id(global_data["gr_id"])
    assert all(source['id'] != global_data["ds_id"] for source in sources)
    second = redash.delete_data_source_from_group("_datasource-test", "_group-test")
    assert second.status_code == 404
def test_delete_user():
    """Deleting an existing user returns 200; a missing one returns 404."""
    assert redash.delete_user("_user1-test").status_code == 200
    assert redash.delete_user("_user1-test").status_code == 404
def test_delete_group():
    """Deleting an existing group returns 200; a missing one returns 404."""
    assert redash.delete_group("_group-test").status_code == 200
    assert redash.delete_group("_group-test").status_code == 404
def test_delete_data_source():
    """Deleting existing data sources returns 204; a missing one returns 404."""
    assert redash.delete_data_source("_datasource-test").status_code == 204
    assert redash.delete_data_source("_datasource-test2").status_code == 204
    assert redash.delete_data_source("_datasource-test").status_code == 404
| 41.198758
| 94
| 0.677974
| 912
| 6,633
| 4.609649
| 0.100877
| 0.070647
| 0.043292
| 0.049715
| 0.855614
| 0.795671
| 0.762131
| 0.715985
| 0.704091
| 0.611323
| 0
| 0.020419
| 0.165687
| 6,633
| 160
| 95
| 41.45625
| 0.739248
| 0.021257
| 0
| 0.544776
| 0
| 0
| 0.222394
| 0.020975
| 0
| 0
| 0
| 0
| 0.30597
| 1
| 0.119403
| false
| 0.052239
| 0.014925
| 0.007463
| 0.141791
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
6e883f066e75d4957d2444be77c78d9888f2021a
| 30,029
|
py
|
Python
|
dxm/lib/masking_api/api/database_ruleset_api.py
|
experiortec/dxm-toolkit
|
b2ab6189e163c62fa8d7251cd533d2a36430d44a
|
[
"Apache-2.0"
] | 5
|
2018-08-23T15:47:05.000Z
|
2022-01-19T23:38:18.000Z
|
dxm/lib/masking_api/api/database_ruleset_api.py
|
experiortec/dxm-toolkit
|
b2ab6189e163c62fa8d7251cd533d2a36430d44a
|
[
"Apache-2.0"
] | 59
|
2018-10-15T10:37:00.000Z
|
2022-03-22T20:49:25.000Z
|
dxm/lib/masking_api/api/database_ruleset_api.py
|
experiortec/dxm-toolkit
|
b2ab6189e163c62fa8d7251cd533d2a36430d44a
|
[
"Apache-2.0"
] | 12
|
2019-03-08T19:59:13.000Z
|
2021-12-16T03:28:04.000Z
|
# coding: utf-8
"""
Masking API
Schema for the Masking Engine API # noqa: E501
OpenAPI spec version: 5.1.8
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from dxm.lib.masking_api.api_client import ApiClient
class DatabaseRulesetApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    # Fall back to a default ApiClient when the caller does not supply one.
    if api_client is None:
        api_client = ApiClient()
    self.api_client = api_client
def bulk_table_update(self, database_ruleset_id, body, **kwargs):  # noqa: E501
    """Update the set of tables and their attributes associated with a database ruleset in bulk  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.bulk_table_update(database_ruleset_id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int database_ruleset_id: The ID of the database ruleset to update the tables for (required)
    :param TableMetadataBulkInput body: The exact list of tables to put in the ruleset. Note that existing tables for this ruleset not in this list will be deleted (required)
    :return: AsyncTask
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Thin wrapper: the _with_http_info variant does all the work.
    if kwargs.get('async_req'):
        # Asynchronous path: return the request thread immediately.
        return self.bulk_table_update_with_http_info(database_ruleset_id, body, **kwargs)  # noqa: E501
    else:
        (data) = self.bulk_table_update_with_http_info(database_ruleset_id, body, **kwargs)  # noqa: E501
        return data
def bulk_table_update_with_http_info(self, database_ruleset_id, body, **kwargs):  # noqa: E501
    """Update the set of tables and their attributes associated with a database ruleset in bulk  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.bulk_table_update_with_http_info(database_ruleset_id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int database_ruleset_id: The ID of the database ruleset to update the tables for (required)
    :param TableMetadataBulkInput body: The exact list of tables to put in the ruleset. Note that existing tables for this ruleset not in this list will be deleted (required)
    :return: AsyncTask
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keyword arguments accepted by this endpoint.
    all_params = ['database_ruleset_id', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject any keyword argument that is not a recognized parameter.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method bulk_table_update" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'database_ruleset_id' is set
    if self.api_client.client_side_validation and ('database_ruleset_id' not in params or
                                                   params['database_ruleset_id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `database_ruleset_id` when calling `bulk_table_update`")  # noqa: E501
    # verify the required parameter 'body' is set
    if self.api_client.client_side_validation and ('body' not in params or
                                                   params['body'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `body` when calling `bulk_table_update`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'database_ruleset_id' in params:
        path_params['databaseRulesetId'] = params['database_ruleset_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    # Delegate the actual HTTP call; ApiClient handles (de)serialization and auth.
    return self.api_client.call_api(
        '/database-rulesets/{databaseRulesetId}/bulk-table-update', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='AsyncTask',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_database_ruleset(self, body, **kwargs):  # noqa: E501
    """Create database ruleset  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_database_ruleset(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param DatabaseRuleset body: The database ruleset to create (required)
    :return: DatabaseRuleset
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Thin wrapper: the _with_http_info variant does all the work.
    if kwargs.get('async_req'):
        # Asynchronous path: return the request thread immediately.
        return self.create_database_ruleset_with_http_info(body, **kwargs)  # noqa: E501
    else:
        (data) = self.create_database_ruleset_with_http_info(body, **kwargs)  # noqa: E501
        return data
def create_database_ruleset_with_http_info(self, body, **kwargs):  # noqa: E501
    """Create database ruleset  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_database_ruleset_with_http_info(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param DatabaseRuleset body: The database ruleset to create (required)
    :return: DatabaseRuleset
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keyword arguments accepted by this endpoint.
    all_params = ['body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject any keyword argument that is not a recognized parameter.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_database_ruleset" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if self.api_client.client_side_validation and ('body' not in params or
                                                   params['body'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `body` when calling `create_database_ruleset`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    # Delegate the actual HTTP call; ApiClient handles (de)serialization and auth.
    return self.api_client.call_api(
        '/database-rulesets', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='DatabaseRuleset',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_database_ruleset(self, database_ruleset_id, **kwargs):  # noqa: E501
    """Delete database ruleset by ID  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_database_ruleset(database_ruleset_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int database_ruleset_id: The ID of the database ruleset to delete (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Thin wrapper: the _with_http_info variant does all the work.
    if kwargs.get('async_req'):
        # Asynchronous path: return the request thread immediately.
        return self.delete_database_ruleset_with_http_info(database_ruleset_id, **kwargs)  # noqa: E501
    else:
        (data) = self.delete_database_ruleset_with_http_info(database_ruleset_id, **kwargs)  # noqa: E501
        return data
def delete_database_ruleset_with_http_info(self, database_ruleset_id, **kwargs):  # noqa: E501
    """Delete database ruleset by ID  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_database_ruleset_with_http_info(database_ruleset_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int database_ruleset_id: The ID of the database ruleset to delete (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keyword arguments accepted by this endpoint.
    all_params = ['database_ruleset_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject any keyword argument that is not a recognized parameter.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_database_ruleset" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'database_ruleset_id' is set
    if self.api_client.client_side_validation and ('database_ruleset_id' not in params or
                                                   params['database_ruleset_id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `database_ruleset_id` when calling `delete_database_ruleset`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'database_ruleset_id' in params:
        path_params['databaseRulesetId'] = params['database_ruleset_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    # Delegate the actual HTTP call; ApiClient handles (de)serialization and auth.
    return self.api_client.call_api(
        '/database-rulesets/{databaseRulesetId}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_all_database_rulesets(self, **kwargs):  # noqa: E501
    """Get all database rulesets  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_all_database_rulesets(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int environment_id: The ID of the environment to get all database rulesets from
    :param int page_number: The page number for which to get database rulesets. This will default to the first page if excluded
    :param int page_size: The maximum number of objects to return. This will default to the DEFAULT_API_PAGE_SIZE property if not provided
    :return: DatabaseRulesetList
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Thin wrapper: the _with_http_info variant does all the work.
    if kwargs.get('async_req'):
        # Asynchronous path: return the request thread immediately.
        return self.get_all_database_rulesets_with_http_info(**kwargs)  # noqa: E501
    else:
        (data) = self.get_all_database_rulesets_with_http_info(**kwargs)  # noqa: E501
        return data
def get_all_database_rulesets_with_http_info(self, **kwargs):  # noqa: E501
    """Get all database rulesets  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_all_database_rulesets_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int environment_id: The ID of the environment to get all database rulesets from
    :param int page_number: The page number for which to get database rulesets. This will default to the first page if excluded
    :param int page_size: The maximum number of objects to return. This will default to the DEFAULT_API_PAGE_SIZE property if not provided
    :return: DatabaseRulesetList
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keyword arguments accepted by this endpoint.
    all_params = ['environment_id', 'page_number', 'page_size']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject any keyword argument that is not a recognized parameter.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_all_database_rulesets" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    # All three filters are optional; only forward the ones the caller set.
    query_params = []
    if 'environment_id' in params:
        query_params.append(('environment_id', params['environment_id']))  # noqa: E501
    if 'page_number' in params:
        query_params.append(('page_number', params['page_number']))  # noqa: E501
    if 'page_size' in params:
        query_params.append(('page_size', params['page_size']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    # Delegate the actual HTTP call; ApiClient handles (de)serialization and auth.
    return self.api_client.call_api(
        '/database-rulesets', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='DatabaseRulesetList',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_database_ruleset_by_id(self, database_ruleset_id, **kwargs):  # noqa: E501
    """Get database ruleset by ID  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_database_ruleset_by_id(database_ruleset_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int database_ruleset_id: The ID of the database ruleset to get (required)
    :return: DatabaseRuleset
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Thin wrapper: the _with_http_info variant does all the work.
    if kwargs.get('async_req'):
        # Asynchronous path: return the request thread immediately.
        return self.get_database_ruleset_by_id_with_http_info(database_ruleset_id, **kwargs)  # noqa: E501
    else:
        (data) = self.get_database_ruleset_by_id_with_http_info(database_ruleset_id, **kwargs)  # noqa: E501
        return data
def get_database_ruleset_by_id_with_http_info(self, database_ruleset_id, **kwargs):  # noqa: E501
    """Get database ruleset by ID  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_database_ruleset_by_id_with_http_info(database_ruleset_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int database_ruleset_id: The ID of the database ruleset to get (required)
    :return: DatabaseRuleset
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keyword arguments accepted by this endpoint.
    all_params = ['database_ruleset_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject any keyword argument that is not a recognized parameter.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_database_ruleset_by_id" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'database_ruleset_id' is set
    if self.api_client.client_side_validation and ('database_ruleset_id' not in params or
                                                   params['database_ruleset_id'] is None):  # noqa: E501
        raise ValueError("Missing the required parameter `database_ruleset_id` when calling `get_database_ruleset_by_id`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'database_ruleset_id' in params:
        path_params['databaseRulesetId'] = params['database_ruleset_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    # Delegate the actual HTTP call; ApiClient handles (de)serialization and auth.
    return self.api_client.call_api(
        '/database-rulesets/{databaseRulesetId}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='DatabaseRuleset',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def refresh_database_ruleset(self, database_ruleset_id, **kwargs):  # noqa: E501
    """Refresh database ruleset by ID  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.refresh_database_ruleset(database_ruleset_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int database_ruleset_id: The ID of the database ruleset to refresh (required)
    :return: AsyncTask
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Thin wrapper: the _with_http_info variant does all the work.
    if kwargs.get('async_req'):
        # Asynchronous path: return the request thread immediately.
        return self.refresh_database_ruleset_with_http_info(database_ruleset_id, **kwargs)  # noqa: E501
    else:
        (data) = self.refresh_database_ruleset_with_http_info(database_ruleset_id, **kwargs)  # noqa: E501
        return data
    def refresh_database_ruleset_with_http_info(self, database_ruleset_id, **kwargs):  # noqa: E501
        """Refresh database ruleset by ID  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.refresh_database_ruleset_with_http_info(database_ruleset_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int database_ruleset_id: The ID of the database ruleset to refresh (required)
        :return: AsyncTask
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Keyword arguments accepted by this endpoint: the path parameter
        # plus the standard per-call client options.
        all_params = ['database_ruleset_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # locals() snapshots self/database_ruleset_id/kwargs; validated
        # kwargs entries are then flattened into this same dict.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method refresh_database_ruleset" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'database_ruleset_id' is set
        if self.api_client.client_side_validation and ('database_ruleset_id' not in params or
                                                       params['database_ruleset_id'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `database_ruleset_id` when calling `refresh_database_ruleset`")  # noqa: E501

        collection_formats = {}

        # Path template substitution: {databaseRulesetId} in the URL below.
        path_params = {}
        if 'database_ruleset_id' in params:
            path_params['databaseRulesetId'] = params['database_ruleset_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # No request body for this endpoint.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501

        return self.api_client.call_api(
            '/database-rulesets/{databaseRulesetId}/refresh', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='AsyncTask',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def update_database_ruleset(self, database_ruleset_id, body, **kwargs): # noqa: E501
"""Update database ruleset # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_database_ruleset(database_ruleset_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int database_ruleset_id: The ID of the database ruleset to update (required)
:param DatabaseRuleset body: The updated form of the database ruleset (required)
:return: DatabaseRuleset
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_database_ruleset_with_http_info(database_ruleset_id, body, **kwargs) # noqa: E501
else:
(data) = self.update_database_ruleset_with_http_info(database_ruleset_id, body, **kwargs) # noqa: E501
return data
    def update_database_ruleset_with_http_info(self, database_ruleset_id, body, **kwargs):  # noqa: E501
        """Update database ruleset  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.update_database_ruleset_with_http_info(database_ruleset_id, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int database_ruleset_id: The ID of the database ruleset to update (required)
        :param DatabaseRuleset body: The updated form of the database ruleset (required)
        :return: DatabaseRuleset
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Keyword arguments accepted by this endpoint: path parameter, body,
        # plus the standard per-call client options.
        all_params = ['database_ruleset_id', 'body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # locals() snapshots self/database_ruleset_id/body/kwargs; validated
        # kwargs entries are then flattened into this same dict.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method update_database_ruleset" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'database_ruleset_id' is set
        if self.api_client.client_side_validation and ('database_ruleset_id' not in params or
                                                       params['database_ruleset_id'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `database_ruleset_id` when calling `update_database_ruleset`")  # noqa: E501
        # verify the required parameter 'body' is set
        if self.api_client.client_side_validation and ('body' not in params or
                                                       params['body'] is None):  # noqa: E501
            raise ValueError("Missing the required parameter `body` when calling `update_database_ruleset`")  # noqa: E501

        collection_formats = {}

        # Path template substitution: {databaseRulesetId} in the URL below.
        path_params = {}
        if 'database_ruleset_id' in params:
            path_params['databaseRulesetId'] = params['database_ruleset_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # The updated ruleset is sent as the request body.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501

        return self.api_client.call_api(
            '/database-rulesets/{databaseRulesetId}', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='DatabaseRuleset',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| 42.533994
| 178
| 0.626894
| 3,498
| 30,029
| 5.108062
| 0.052887
| 0.119208
| 0.071357
| 0.028207
| 0.956682
| 0.952821
| 0.941795
| 0.925845
| 0.921816
| 0.919297
| 0
| 0.013814
| 0.291285
| 30,029
| 705
| 179
| 42.594326
| 0.825768
| 0.332146
| 0
| 0.802083
| 1
| 0
| 0.197832
| 0.058003
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039063
| false
| 0
| 0.010417
| 0
| 0.106771
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6e513ded3f8ec058495953e174f7365f2393b516
| 14,720
|
py
|
Python
|
datamine/loaders/1qbit.py
|
Saran33/datamine_python
|
396a01883fe98f31e32d506d50e4eeaa2de06466
|
[
"BSD-3-Clause"
] | 39
|
2019-05-15T19:22:03.000Z
|
2022-03-08T08:54:51.000Z
|
datamine/loaders/1qbit.py
|
Saran33/datamine_python
|
396a01883fe98f31e32d506d50e4eeaa2de06466
|
[
"BSD-3-Clause"
] | 9
|
2019-02-26T03:50:27.000Z
|
2021-07-24T15:31:38.000Z
|
datamine/loaders/1qbit.py
|
Saran33/datamine_python
|
396a01883fe98f31e32d506d50e4eeaa2de06466
|
[
"BSD-3-Clause"
] | 11
|
2019-04-16T12:32:29.000Z
|
2021-08-28T15:09:51.000Z
|
from . import Loader
import pandas as pd
class OneQBitLoader(Loader):
    """Loader for the 1QBit dataset (``1QBit_*.csv`` files).

    Declares the expected column layout and the per-dtype column groupings
    consumed by the base ``Loader``; ``_load`` reads a single CSV file into
    a pandas DataFrame.
    """

    dataset = '1QBIT'
    fileglob = '1QBit_*.csv'

    # The MIX_BIN_* histogram columns run from NEG_100 down through NEG_01
    # and then POS_00 up through POS_155 (two-digit zero padding below 100).
    # Generate the ~256 names once instead of hand-listing them twice
    # (once in `columns`, once in the float dtype group).
    _MIX_BIN_COLUMNS = (['MIX_BIN_NEG_%02d' % i for i in range(100, 0, -1)] +
                        ['MIX_BIN_POS_%02d' % i for i in range(156)])

    # Expected column order of the raw CSV.
    columns = ['TRADEDATE', 'DATA_SOURCE', 'EODDESC', 'CHART_TITLE',
               'YYYY', 'MM', 'DD', 'DATECODE_EXCEL', 'DATE_LABEL', 'F_PROD_CODE', 'O_PROD_CODE',
               'PRICE_SETTLE_ACTIVE', 'PRICE_HIGH_ACTIVE', 'PRICE_LOW_ACTIVE', 'YYYY_ACTIVE', 'MM_ACTIVE', 'F_VOLUME_ACTIVE',
               'PRICE_SETTLE_NEXT', 'PRICE_HIGH_NEXT', 'PRICE_LOW_NEXT', 'YYYY_NEXT', 'MM_NEXT', 'F_VOLUME_NEXT',
               'F_VOLUME', 'IMPLIED_VOL', 'PUT_VOLUME', 'CALL_VOLUME', 'OPTIONS_VOLUME', 'PUT_OI', 'CALL_OI', 'O_OI',
               'CURRENT_PRICE_MOST_ACTIVE', 'PREVIOUS_PRICE_MOST_ACTIVE', 'PRICE_PCT_CHG', 'EXCESS_RETURN_INDEX',
               'IMPLIED_VOL_ST', 'IMPLIED_VOL_LT', 'DAILY_VARIANCE', 'HISTORICAL_STD_ST', 'HISTORICAL_STD_LT', 'RATIO_STD_ST_LT',
               'RATIO_STD_ST_TO_IMPLIED_VOL_CURRENT', 'RATIO_HIGH_LOW_PCT', 'HIGH_LOW_PCT_ST', 'HIGH_LOW_PCT_LT',
               'RATIO_HIGH_LOW_ST_LT', 'PUT_VOLUME_ST', 'PUT_VOLUME_LT', 'RATIO_PUT_VOLUME_ST_LT', 'CALL_VOLUME_ST',
               'CALL_VOLUME_LT', 'RATIO_CALL_VOLUME_ST_LT', 'RATIO_PUT_CALL_VOLUME_ST', 'RATIO_PUT_CALL_VOLUME_LT',
               'PCT_DIF_PUT_CALL_ST_LT_RATIO', 'MOMENTUM_ST', 'MOMENTUM_LT', 'RATIO_MOMENTUM_ST_LT', 'RATIO_MOMENTUM_TO_STD_ST',
               'RATIO_MOMENTUM_TO_STD_LT', 'PRICE_20D_MA', 'PRICE_60D_MA', 'PRICE_200D_MA', 'PCT_DIF_CURRENT_200D_PRICE',
               'PCT_DIF_20D_200D_PRICE', 'PEAK_PRICE', 'PEAK_200D_PRICE', '20PCT_BELOW_PEAK_200D', '20PCT_ABOVE_60DMA',
               '20PCT_BELOW_60DMA', 'MIX_PROB_20PCT_ABOVE_60DMA',
               'MIX_PROB_20PCT_BELOW_60DMA', 'MIX_MEAN', 'MIX_MEDIAN', 'MIX_MODE_1', 'MIX_MODE_2',
               'MIX_STD', 'MIX_STD_LT', 'MIX_SKEW', 'MIX_KURTOSIS', 'MIX_STATE', 'MIX_COMPLACENT', 'MIX_BALANCED', 'MIX_ANXIOUS',
               'MIX_CONFLICTED', 'MIX_MODALITY', 'MIX_DISTANCE', 'MIX_INTENSITY', 'MIX_LOW_BIN', 'MIX_BIN_SIZE', 'MIX_BINS'] + _MIX_BIN_COLUMNS

    # dtype name -> columns of that dtype. Note: 'IMPLIED_VOL' and the
    # PRICE_* columns are deliberately absent here, matching the original
    # groupings.
    dtypes = {'category': ('DATA_SOURCE', 'EODDESC', 'CHART_TITLE', 'F_PROD_CODE', 'O_PROD_CODE', 'MIX_STATE', 'MIX_MODALITY'),
              'int64': ('YYYY', 'MM', 'DD', 'YYYY_ACTIVE', 'MM_ACTIVE', 'F_VOLUME_ACTIVE',
                        'YYYY_NEXT', 'MM_NEXT', 'F_VOLUME_NEXT', 'F_VOLUME', 'PUT_VOLUME', 'CALL_VOLUME',
                        'OPTIONS_VOLUME', 'PUT_OI', 'CALL_OI', 'O_OI',
                        'MIX_COMPLACENT', 'MIX_BALANCED', 'MIX_ANXIOUS', 'MIX_CONFLICTED', 'MIX_DISTANCE'),
              'float': ('DATECODE_EXCEL', 'CURRENT_PRICE_MOST_ACTIVE', 'PREVIOUS_PRICE_MOST_ACTIVE', 'PRICE_PCT_CHG', 'EXCESS_RETURN_INDEX',
                        'IMPLIED_VOL_ST', 'IMPLIED_VOL_LT', 'DAILY_VARIANCE', 'HISTORICAL_STD_ST', 'HISTORICAL_STD_LT', 'RATIO_STD_ST_LT',
                        'RATIO_STD_ST_TO_IMPLIED_VOL_CURRENT', 'RATIO_HIGH_LOW_PCT', 'HIGH_LOW_PCT_ST', 'HIGH_LOW_PCT_LT',
                        'RATIO_HIGH_LOW_ST_LT', 'PUT_VOLUME_ST', 'PUT_VOLUME_LT', 'RATIO_PUT_VOLUME_ST_LT', 'CALL_VOLUME_ST',
                        'CALL_VOLUME_LT', 'RATIO_CALL_VOLUME_ST_LT', 'RATIO_PUT_CALL_VOLUME_ST', 'RATIO_PUT_CALL_VOLUME_LT',
                        'PCT_DIF_PUT_CALL_ST_LT_RATIO', 'MOMENTUM_ST', 'MOMENTUM_LT', 'RATIO_MOMENTUM_ST_LT', 'RATIO_MOMENTUM_TO_STD_ST',
                        'RATIO_MOMENTUM_TO_STD_LT', 'PRICE_20D_MA', 'PRICE_60D_MA', 'PRICE_200D_MA', 'PCT_DIF_CURRENT_200D_PRICE',
                        'PCT_DIF_20D_200D_PRICE', 'PEAK_PRICE', 'PEAK_200D_PRICE', '20PCT_BELOW_PEAK_200D', '20PCT_ABOVE_60DMA',
                        '20PCT_BELOW_60DMA', 'MIX_PROB_20PCT_ABOVE_60DMA',
                        'MIX_PROB_20PCT_BELOW_60DMA', 'MIX_MEAN', 'MIX_MEDIAN', 'MIX_MODE_1', 'MIX_MODE_2',
                        'MIX_STD', 'MIX_STD_LT', 'MIX_SKEW', 'MIX_KURTOSIS', 'MIX_INTENSITY', 'MIX_LOW_BIN', 'MIX_BIN_SIZE', 'MIX_BINS') + tuple(_MIX_BIN_COLUMNS),
              # BUG FIX: these two values were bare parenthesized strings,
              # e.g. ('DATE_LABEL') == 'DATE_LABEL' (missing trailing comma),
              # so iterating them yields single characters rather than a
              # column name. Made into proper 1-tuples, consistent with the
              # other dtype groups.
              'date': ('DATE_LABEL',),
              'date:%Y%m%d': ('TRADEDATE',)}

    def _load(self, file):
        """Read one 1QBit CSV into a DataFrame.

        :param file: path or file-like object accepted by ``pd.read_csv``.
        :return: the parsed ``pd.DataFrame``.
        """
        # skiprows=[1, 2]: the two rows after the header are skipped —
        # presumably sub-header/description rows; TODO confirm against a
        # sample file.
        df = pd.read_csv(file, skiprows=[1, 2], low_memory=False)
        return df
oneqbitloader = OneQBitLoader()
| 107.445255
| 139
| 0.67697
| 2,606
| 14,720
| 3.10284
| 0.101305
| 0.3814
| 0.347267
| 0.005936
| 0.945709
| 0.93903
| 0.934578
| 0.926911
| 0.926911
| 0.906629
| 0
| 0.101415
| 0.178736
| 14,720
| 136
| 140
| 108.235294
| 0.567458
| 0
| 0
| 0.784615
| 0
| 0
| 0.665489
| 0.051087
| 0
| 0
| 0
| 0
| 0
| 1
| 0.007692
| false
| 0
| 0.015385
| 0
| 0.069231
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
2830f1da2977dcf1c3b32c0ca8f80de5cc150d56
| 3,351
|
py
|
Python
|
matrox/tests/linalg/test_elimination.py
|
rkty13/Matrox
|
e11f9c04ba62e5c23f5eee6c3c4ee1d183919653
|
[
"MIT"
] | 3
|
2017-02-26T22:07:15.000Z
|
2017-12-30T21:25:39.000Z
|
matrox/tests/linalg/test_elimination.py
|
rkty13/Matrox
|
e11f9c04ba62e5c23f5eee6c3c4ee1d183919653
|
[
"MIT"
] | 11
|
2017-02-02T03:07:51.000Z
|
2018-01-02T21:10:32.000Z
|
matrox/tests/linalg/test_elimination.py
|
rkty13/Matrox
|
e11f9c04ba62e5c23f5eee6c3c4ee1d183919653
|
[
"MIT"
] | 1
|
2017-02-26T22:07:16.000Z
|
2017-02-26T22:07:16.000Z
|
import unittest
from matrox import Matrix, fill_matrix
from matrox.linalg import *
class TestMatrixElimination(unittest.TestCase):
    """Checks gaussian_elimination, rref and ref against known matrices.

    Expectations are pinned via repr() of the returned Matrix objects and
    of the (optional) elimination history lists.
    """

    def test_gaussian_elimination(self):
        # No history requested: both tracebacks stay empty.
        reduced, hist, inv_hist = gaussian_elimination(
            Matrix([[1, 2, 3], [4, 5, 6]], fraction=True))
        self.assertEqual(repr(reduced),
            "Matrix([['1', '0', '-1'], ['0', '1', '2']])")
        self.assertEqual(repr(hist), "[]")
        self.assertEqual(repr(inv_hist), "[]")

        # Forward history only.
        reduced, hist, inv_hist = gaussian_elimination(
            Matrix([[4, 7], [2, 6]], fraction=True), history=True)
        self.assertEqual(repr(reduced),
            "Matrix([['1', '0'], ['0', '1']])")
        self.assertEqual(repr(hist),
            "[Matrix([['1/4', '0'], ['0', '1']]), "
            "Matrix([['1', '0'], ['-2', '1']]), "
            "Matrix([['1', '0'], ['0', '2/5']]), "
            "Matrix([['1', '-7/4'], ['0', '1']])]")
        self.assertEqual(repr(inv_hist), "[]")

        # Inverse history only.
        reduced, hist, inv_hist = gaussian_elimination(
            Matrix([[4, 7], [2, 6]], fraction=True), inverse_history=True)
        self.assertEqual(repr(reduced),
            "Matrix([['1', '0'], ['0', '1']])")
        self.assertEqual(repr(hist), "[]")
        self.assertEqual(repr(inv_hist),
            "[Matrix([['4', '0'], ['0', '1']]), "
            "Matrix([['1', '0'], ['2', '1']]), "
            "Matrix([['1', '0'], ['0', '5/2']]), "
            "Matrix([['1', '7/4'], ['0', '1']])]")

        # A zero row forces a row swap; only the result is checked here.
        reduced, hist, inv_hist = gaussian_elimination(
            Matrix([[0, 0, 0], [3, 2, 1]], fraction=True),
            history=True, inverse_history=True)
        self.assertEqual(repr(reduced),
            "Matrix([['1', '2/3', '1/3'], ['0', '0', '0']])")

    def test_rref(self):
        reduced, hist, inv_hist = rref(
            Matrix([[1, 2, 3], [4, 5, 6]], fraction=True))
        self.assertEqual(repr(reduced),
            "Matrix([['1', '0', '-1'], ['0', '1', '2']])")
        self.assertEqual(repr(hist), "[]")
        self.assertEqual(repr(inv_hist), "[]")

        # Constant (rank-1) matrix reduces to a single pivot row.
        reduced, hist, inv_hist = rref(fill_matrix(3, 3, 2, fraction=True))
        self.assertEqual(repr(reduced),
            "Matrix([['1', '1', '1'], ['0', '0', '0'], ['0', '0', '0']])")
        self.assertEqual(repr(hist), "[]")
        self.assertEqual(repr(inv_hist), "[]")

        # Duplicate and zero rows collapse below the pivot row.
        reduced, hist, inv_hist = rref(
            Matrix([[1, 0, 3], [0, 0, 0], [1, 0, 3]], fraction=True))
        self.assertEqual(repr(reduced),
            "Matrix([['1', '0', '3'], ['0', '0', '0'], ['0', '0', '0']])")
        self.assertEqual(repr(hist), "[]")
        self.assertEqual(repr(inv_hist), "[]")

    def test_ref(self):
        # ref stops at row-echelon form (no back-substitution).
        reduced, hist, inv_hist = ref(
            Matrix([[1, 2, 3], [4, 5, 6]], fraction=True))
        self.assertEqual(repr(reduced),
            "Matrix([['1', '2', '3'], ['0', '1', '2']])")
        self.assertEqual(repr(hist), "[]")
        self.assertEqual(repr(inv_hist), "[]")
| 44.68
| 78
| 0.51895
| 384
| 3,351
| 4.416667
| 0.085938
| 0.194575
| 0.246462
| 0.089623
| 0.875
| 0.875
| 0.862028
| 0.862028
| 0.854363
| 0.832547
| 0
| 0.052694
| 0.246792
| 3,351
| 74
| 79
| 45.283784
| 0.619255
| 0
| 0
| 0.546875
| 0
| 0.046875
| 0.19815
| 0
| 0
| 0
| 0
| 0
| 0.34375
| 1
| 0.046875
| false
| 0
| 0.046875
| 0
| 0.109375
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
28550c2efcf05070bee0949972ae2e5c87311163
| 389
|
py
|
Python
|
setup1.py
|
nelliesnoodles/Whispering-Wall
|
7240c33cbafd078375320df2dc95551f42550ab7
|
[
"MIT"
] | null | null | null |
setup1.py
|
nelliesnoodles/Whispering-Wall
|
7240c33cbafd078375320df2dc95551f42550ab7
|
[
"MIT"
] | null | null | null |
setup1.py
|
nelliesnoodles/Whispering-Wall
|
7240c33cbafd078375320df2dc95551f42550ab7
|
[
"MIT"
] | 1
|
2019-03-20T19:54:14.000Z
|
2019-03-20T19:54:14.000Z
|
import os

# NLTK corpora/models this project needs available locally.
_NLTK_PACKAGES = ('wordnet', 'punkt', 'averaged_perceptron_tagger')


def _download_nltk_data():
    """Import nltk (raising ImportError if absent) and fetch required data."""
    import nltk
    for package in _NLTK_PACKAGES:
        nltk.download(package)


try:
    _download_nltk_data()
except ImportError:
    # nltk missing: best-effort install, then retry the downloads once.
    os.system('pip3 install nltk')
    _download_nltk_data()

try:
    # BUG FIX: the PyEnchant distribution is imported as `enchant`;
    # `import PyEnchant` always raised ImportError, so the install branch
    # ran on every invocation and the module was never actually imported.
    import enchant  # noqa: F401
except ImportError:
    os.system("pip3 install PyEnchant")
| 17.681818
| 45
| 0.74036
| 50
| 389
| 5.68
| 0.36
| 0.253521
| 0.098592
| 0.15493
| 0.852113
| 0.852113
| 0.852113
| 0.852113
| 0.577465
| 0.577465
| 0
| 0.006024
| 0.14653
| 389
| 21
| 46
| 18.52381
| 0.849398
| 0
| 0
| 0.75
| 0
| 0
| 0.29563
| 0.133676
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.375
| 0
| 0.375
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
289c16eb8f23cbea927a7442262f6d3438210313
| 222
|
py
|
Python
|
objs/Iterator.py
|
danilocgsilva/awsebdirect
|
3958ee73dac259b5493ea123aadee28bd6321802
|
[
"MIT"
] | null | null | null |
objs/Iterator.py
|
danilocgsilva/awsebdirect
|
3958ee73dac259b5493ea123aadee28bd6321802
|
[
"MIT"
] | null | null | null |
objs/Iterator.py
|
danilocgsilva/awsebdirect
|
3958ee73dac259b5493ea123aadee28bd6321802
|
[
"MIT"
] | null | null | null |
class Iterator:
    """Holds a raw client response and exposes environment counting."""

    def set_client_response(self, client_response: dict):
        """Store the raw response dict; returns self for call chaining."""
        self.client_response = client_response
        return self

    def count(self):
        """Number of entries under the response's 'Environments' key."""
        environments = self.client_response['Environments']
        return len(environments)
| 22.2
| 57
| 0.693694
| 26
| 222
| 5.692308
| 0.461538
| 0.472973
| 0.364865
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.225225
| 222
| 9
| 58
| 24.666667
| 0.860465
| 0
| 0
| 0
| 0
| 0
| 0.054054
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
956bde75991fa1f5f5c24dba08c98fa437eb4679
| 11,576
|
py
|
Python
|
goutdotcom/lab/tests/test_models.py
|
Spiewart/goutdotcom
|
0916155732a72fcb8c8a2fb0f4dd81efef618af8
|
[
"MIT"
] | null | null | null |
goutdotcom/lab/tests/test_models.py
|
Spiewart/goutdotcom
|
0916155732a72fcb8c8a2fb0f4dd81efef618af8
|
[
"MIT"
] | null | null | null |
goutdotcom/lab/tests/test_models.py
|
Spiewart/goutdotcom
|
0916155732a72fcb8c8a2fb0f4dd81efef618af8
|
[
"MIT"
] | null | null | null |
from goutdotcom.profiles.models import PatientProfile
from goutdotcom.profiles.tests.factories import FamilyProfileFactory, PatientProfileFactory, SocialProfileFactory
from goutdotcom.users.tests.factories import UserFactory
from goutdotcom.lab.models import round_decimal
import pytest
from decimal import *
from .factories import UrateFactory, ASTFactory, ALTFactory, PlateletFactory, WBCFactory, HemoglobinFactory, CreatinineFactory
pytestmark = pytest.mark.django_db
class TestRoundDecimal:
    """Sanity-check of two-place Decimal quantization."""

    def test_value_return(self):
        # NOTE(review): this exercises Decimal.quantize directly; it never
        # calls the imported round_decimal helper — confirm intent.
        quantized = Decimal(0.59343).quantize(Decimal(10) ** -2)
        assert quantized == Decimal('0.59')
class TestUrateMethods:
    """__str__/__unicode__/get_absolute_url behavior of the Urate model."""

    def _make_urate(self):
        # Same factory call order as before: user, then the three profiles
        # (side effects only), then the lab value under test.
        user = UserFactory()
        PatientProfileFactory(user=user)
        FamilyProfileFactory(user=user)
        SocialProfileFactory(user=user)
        return UrateFactory(user=user)

    def test__str__(self):
        urate = self._make_urate()
        assert urate.__str__() == str(urate.value)

    def test__unicode__(self):
        urate = self._make_urate()
        assert urate.__unicode__() == str(urate.name)

    def test_get_absolute_url(self):
        urate = self._make_urate()
        assert urate.get_absolute_url() == f"/lab/urate/{urate.pk}/"
class TestALTMethods:
    """__str__/__unicode__/get_absolute_url behavior of the ALT model."""

    def _make_alt(self):
        # Same factory call order as before: user, then the three profiles
        # (side effects only), then the lab value under test.
        user = UserFactory()
        PatientProfileFactory(user=user)
        FamilyProfileFactory(user=user)
        SocialProfileFactory(user=user)
        return ALTFactory(user=user)

    def test__str__(self):
        ALT = self._make_alt()
        assert ALT.__str__() == str(ALT.value)

    def test__unicode__(self):
        ALT = self._make_alt()
        assert ALT.__unicode__() == str(ALT.name)

    def test_get_absolute_url(self):
        ALT = self._make_alt()
        assert ALT.get_absolute_url() == f"/lab/ALT/{ALT.pk}/"
class TestASTMethods:
    """__str__/__unicode__/get_absolute_url behavior of the AST model."""

    def _make_ast(self):
        # Same factory call order as before: user, then the three profiles
        # (side effects only), then the lab value under test.
        user = UserFactory()
        PatientProfileFactory(user=user)
        FamilyProfileFactory(user=user)
        SocialProfileFactory(user=user)
        return ASTFactory(user=user)

    def test__str__(self):
        AST = self._make_ast()
        assert AST.__str__() == str(AST.value)

    def test__unicode__(self):
        AST = self._make_ast()
        assert AST.__unicode__() == str(AST.name)

    def test_get_absolute_url(self):
        AST = self._make_ast()
        assert AST.get_absolute_url() == f"/lab/AST/{AST.pk}/"
class TestPlateletMethods:
    """__str__/__unicode__/get_absolute_url behavior of the Platelet model."""

    def _make_platelet(self):
        # Same factory call order as before: user, then the three profiles
        # (side effects only), then the lab value under test.
        user = UserFactory()
        PatientProfileFactory(user=user)
        FamilyProfileFactory(user=user)
        SocialProfileFactory(user=user)
        return PlateletFactory(user=user)

    def test__str__(self):
        platelet = self._make_platelet()
        assert platelet.__str__() == str(platelet.value)

    def test__unicode__(self):
        platelet = self._make_platelet()
        assert platelet.__unicode__() == str(platelet.name)

    def test_get_absolute_url(self):
        platelet = self._make_platelet()
        assert platelet.get_absolute_url() == f"/lab/platelet/{platelet.pk}/"
class TestWBCMethods:
    """__str__/__unicode__/get_absolute_url behavior of the WBC model."""

    def _make_wbc(self):
        # Same factory call order as before: user, then the three profiles
        # (side effects only), then the lab value under test.
        user = UserFactory()
        PatientProfileFactory(user=user)
        FamilyProfileFactory(user=user)
        SocialProfileFactory(user=user)
        return WBCFactory(user=user)

    def test__str__(self):
        WBC = self._make_wbc()
        assert WBC.__str__() == str(WBC.value)

    def test__unicode__(self):
        WBC = self._make_wbc()
        assert WBC.__unicode__() == str(WBC.name)

    def test_get_absolute_url(self):
        WBC = self._make_wbc()
        assert WBC.get_absolute_url() == f"/lab/WBC/{WBC.pk}/"
class TestHemoglobinMethods:
    """__str__/__unicode__/get_absolute_url behavior of the Hemoglobin model."""

    def _make_hemoglobin(self):
        # Same factory call order as before: user, then the three profiles
        # (side effects only), then the lab value under test.
        user = UserFactory()
        PatientProfileFactory(user=user)
        FamilyProfileFactory(user=user)
        SocialProfileFactory(user=user)
        return HemoglobinFactory(user=user)

    def test__str__(self):
        hemoglobin = self._make_hemoglobin()
        assert hemoglobin.__str__() == str(hemoglobin.value)

    def test__unicode__(self):
        hemoglobin = self._make_hemoglobin()
        assert hemoglobin.__unicode__() == str(hemoglobin.name)

    def test_get_absolute_url(self):
        hemoglobin = self._make_hemoglobin()
        assert hemoglobin.get_absolute_url() == f"/lab/hemoglobin/{hemoglobin.pk}/"
class TestCreatinineMethods:
    """Tests for the Creatinine lab model: string/URL helpers plus the
    CKD-EPI eGFR helper methods (kappa/alpha constants, race and sex
    modifiers, and the full eGFR computation)."""

    def test__str__(self):
        # __str__ renders the measured value.
        user = UserFactory()
        profile = PatientProfileFactory(user=user)
        familyprofile = FamilyProfileFactory(user=user)
        socialprofile = SocialProfileFactory(user=user)
        creatinine = CreatinineFactory(user=user)
        assert(creatinine.__str__() == str(creatinine.value))

    def test__unicode__(self):
        # __unicode__ renders the lab's name, not its value.
        user = UserFactory()
        profile = PatientProfileFactory(user=user)
        familyprofile = FamilyProfileFactory(user=user)
        socialprofile = SocialProfileFactory(user=user)
        creatinine = CreatinineFactory(user=user)
        assert(creatinine.__unicode__() == str(creatinine.name))

    def test_get_absolute_url(self):
        # Absolute URL points at the creatinine detail page.
        user = UserFactory()
        profile = PatientProfileFactory(user=user)
        familyprofile = FamilyProfileFactory(user=user)
        socialprofile = SocialProfileFactory(user=user)
        creatinine = CreatinineFactory(user=user)
        assert creatinine.get_absolute_url() == f"/lab/creatinine/{creatinine.pk}/"

    def test_sex_vars_kappa(self):
        # The CKD-EPI kappa constant depends on the profile's gender.
        user = UserFactory()
        profile = PatientProfileFactory(user=user)
        familyprofile = FamilyProfileFactory(user=user)  # created for side effects; value unused
        socialprofile = SocialProfileFactory(user=user)
        creatinine = CreatinineFactory(user=user)
        if profile.gender == 'male':
            assert creatinine.sex_vars_kappa() == Decimal(0.9)
        elif profile.gender == 'female':
            assert creatinine.sex_vars_kappa() == Decimal(0.7)
        else:
            # NOTE(review): if eGFR_calculator() returns False here, chaining
            # .sex_vars_kappa() onto it would raise AttributeError — confirm
            # what eGFR_calculator() returns for other genders.
            assert creatinine.eGFR_calculator().sex_vars_kappa() == False

    def test_sex_vars_alpha(self):
        # The CKD-EPI alpha exponent depends on the profile's gender.
        user = UserFactory()
        profile = PatientProfileFactory(user=user)
        familyprofile = FamilyProfileFactory(user=user)
        socialprofile = SocialProfileFactory(user=user)
        creatinine = CreatinineFactory(user=user)
        if profile.gender == 'male':
            assert creatinine.sex_vars_alpha() == Decimal(-0.411)
        elif profile.gender == 'female':
            assert creatinine.sex_vars_alpha() == Decimal(-0.329)
        else:
            # NOTE(review): same chained-call concern as in test_sex_vars_kappa;
            # also this asserts sex_vars_kappa, not sex_vars_alpha — likely a
            # copy-paste slip, confirm intent.
            assert creatinine.eGFR_calculator().sex_vars_kappa() == False

    def test_race_modifier(self):
        # Race modifier of the CKD-EPI equation.
        user = UserFactory()
        profile = PatientProfileFactory(user=user)
        familyprofile = FamilyProfileFactory(user=user)
        socialprofile = SocialProfileFactory(user=user)
        creatinine = CreatinineFactory(user=user)
        if profile.race == 'black':
            assert creatinine.race_modifier() == Decimal(1.159)
        elif profile.race == 'white' or profile.race == 'asian' or profile.race == 'native american' or profile.race == 'hispanic':
            assert creatinine.race_modifier() == Decimal(1.00)
        else:
            assert creatinine.eGFR_calculator() == False

    def test_sex_modifier(self):
        # Sex modifier of the CKD-EPI equation.
        user = UserFactory()
        profile = PatientProfileFactory(user=user)
        familyprofile = FamilyProfileFactory(user=user)
        socialprofile = SocialProfileFactory(user=user)
        creatinine = CreatinineFactory(user=user)
        if profile.gender == 'male':
            assert creatinine.sex_modifier() == Decimal(1.018)
        elif profile.gender == 'female' or profile.gender == 'non-binary':
            assert creatinine.sex_modifier() == Decimal(1.00)
        else:
            assert creatinine.eGFR_calculator() == False

    def test_eGFR_calculator(self):
        # Recompute the CKD-EPI eGFR independently and compare against the
        # model's own eGFR_calculator().
        user = UserFactory()
        profile = PatientProfileFactory(user=user)
        familyprofile = FamilyProfileFactory(user=user)
        socialprofile = SocialProfileFactory(user=user)
        creatinine = CreatinineFactory(user=user)
        ##assert Creatinine.eGFR_calculator() == "Can't calculate eGFR without an age (make a profile)"
        kappa = 0
        alpha = 0
        race = 0
        sex = 0
        age = profile.get_age()
        if profile.gender == 'male':
            sex = Decimal(1.018)
            kappa = Decimal(0.9)
            alpha = Decimal(-0.411)
        elif profile.gender == 'female':
            sex = Decimal(1.00)
            kappa = Decimal(0.7)
            alpha = Decimal(-0.329)
        else:
            # NOTE(review): returning from a test silently skips the final
            # assertion instead of failing/skipping explicitly — consider
            # pytest.skip() here.
            return "Something went wrong with eGFR calculation"
        if profile.race == 'black':
            race = Decimal(1.159)
        elif profile.race == 'white' or profile.race == 'asian' or profile.race == 'native american' or profile.race == 'hispanic':
            race = Decimal(1.00)
        else:
            # NOTE(review): same silent-return concern as above.
            return "Something went wrong with eGFR calculation"
        # CKD-EPI: 141 * min(Scr/k, 1)^alpha * max(Scr/k, 1)^-1.209 * 0.993^age * race * sex
        eGFR = Decimal(141) * min(creatinine.value / kappa, Decimal(1.00)) ** alpha * max(creatinine.value / kappa, Decimal(1.00)
        ) ** Decimal(-1.209) * Decimal(0.993) ** age * race * sex
        assert creatinine.eGFR_calculator() == round_decimal(eGFR, 2)
| 40.617544
| 147
| 0.662232
| 1,108
| 11,576
| 6.75
| 0.106498
| 0.111245
| 0.066052
| 0.090386
| 0.816018
| 0.796631
| 0.77751
| 0.772296
| 0.740072
| 0.740072
| 0
| 0.009729
| 0.236351
| 11,576
| 284
| 148
| 40.760563
| 0.836312
| 0.008034
| 0
| 0.713115
| 0
| 0
| 0.033272
| 0.009929
| 0
| 0
| 0
| 0
| 0.143443
| 1
| 0.110656
| false
| 0
| 0.028689
| 0
| 0.180328
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
95c9cec8d771223ecbd4ec816d01af7316a498c0
| 199
|
py
|
Python
|
10/04/1/package1/package12/package121/module1212.py
|
pylangstudy/201706
|
f1cc6af6b18e5bd393cda27f5166067c4645d4d3
|
[
"CC0-1.0"
] | null | null | null |
10/04/1/package1/package12/package121/module1212.py
|
pylangstudy/201706
|
f1cc6af6b18e5bd393cda27f5166067c4645d4d3
|
[
"CC0-1.0"
] | 70
|
2017-06-01T11:02:51.000Z
|
2017-06-30T00:35:32.000Z
|
10/04/1/package1/package12/package121/module1212.py
|
pylangstudy/201706
|
f1cc6af6b18e5bd393cda27f5166067c4645d4d3
|
[
"CC0-1.0"
] | null | null | null |
from ....package11 import module112
#from ../..package11 import module112
#from ./../..package11 import module112
def some_method():
    """Print this module's identifier, then delegate to module112.some_method()."""
    print('module1212.some_method()')
    module112.some_method()
| 28.428571
| 39
| 0.723618
| 22
| 199
| 6.409091
| 0.409091
| 0.276596
| 0.404255
| 0.595745
| 0.595745
| 0.595745
| 0.595745
| 0.595745
| 0
| 0
| 0
| 0.125714
| 0.120603
| 199
| 6
| 40
| 33.166667
| 0.68
| 0.371859
| 0
| 0
| 0
| 0
| 0.195122
| 0.195122
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.25
| 0
| 0.5
| 0.25
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
95ebb5080cc80e0f2081c01aa3d589c0da590500
| 394
|
py
|
Python
|
models/__init__.py
|
CSUBioGroup/DeepGOA
|
09802b14ca4f8be6e8d4c2a72e08dbf7876c3b30
|
[
"MIT"
] | 3
|
2020-09-09T15:57:28.000Z
|
2021-12-22T04:39:06.000Z
|
models/__init__.py
|
CSUBioGroup/DeepGOA
|
09802b14ca4f8be6e8d4c2a72e08dbf7876c3b30
|
[
"MIT"
] | 2
|
2020-09-07T16:13:41.000Z
|
2021-07-09T06:13:35.000Z
|
models/__init__.py
|
CSUBioGroup/DeepGOA
|
09802b14ca4f8be6e8d4c2a72e08dbf7876c3b30
|
[
"MIT"
] | null | null | null |
from .DeepGOA_model import DeepGOA
from .DeepGOA_model import DeepGOA_InterPro
from .DeepGOA_model import DeepGOA_InterPro_PPI
from .DeepGOA_model import DeepGOA_PPI
from .DeepGOA_model import DeepGOA_Seq
from .DeepGOA_model import DeepGOA_Seq_BiLSTM
from .DeepGOA_model import DeepGOA_Seq_PPI
from .DeepGOA_model import DeepGOA_Seq_Multi_CNN
from .DeepGOA_model import DeepGOA_Seq_InterPro
| 30.307692
| 48
| 0.878173
| 59
| 394
| 5.474576
| 0.169492
| 0.306502
| 0.44582
| 0.613003
| 0.931889
| 0.842105
| 0.216718
| 0
| 0
| 0
| 0
| 0
| 0.098985
| 394
| 12
| 49
| 32.833333
| 0.909859
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
251e852b73cbaa87fd38a225b1513a4d878f27f3
| 9,721
|
py
|
Python
|
tests_classla/test_slovenian_pipeline.py
|
lkrsnik/classla-stanfordnlp
|
1ab8771aadfbc648cec51b4c6716797f698e67ff
|
[
"Apache-2.0"
] | 1
|
2020-07-04T21:06:20.000Z
|
2020-07-04T21:06:20.000Z
|
tests_classla/test_slovenian_pipeline.py
|
lkrsnik/classla-stanfordnlp
|
1ab8771aadfbc648cec51b4c6716797f698e67ff
|
[
"Apache-2.0"
] | null | null | null |
tests_classla/test_slovenian_pipeline.py
|
lkrsnik/classla-stanfordnlp
|
1ab8771aadfbc648cec51b4c6716797f698e67ff
|
[
"Apache-2.0"
] | null | null | null |
"""
Basic testing of the Slovenian pipeline
"""
import pytest
import classla
from tests import *
# data for testing
SL_DOC = "France Prešeren je bil rojen v Vrbi. Danes je poznan kot največji slovenski pesnik. Študiral je na Dunaju."
SL_DOC_TOKENS_GOLD = """
<Token index=1;words=[<Word index=1;text=France;lemma=France;upos=PROPN;xpos=Npmsn;feats=Case=Nom|Gender=Masc|Number=Sing;governor=5;dependency_relation=nsubj>]>
<Token index=2;words=[<Word index=2;text=Prešeren;lemma=Prešeren;upos=PROPN;xpos=Npmsn;feats=Case=Nom|Gender=Masc|Number=Sing;governor=1;dependency_relation=flat_name>]>
<Token index=3;words=[<Word index=3;text=je;lemma=biti;upos=AUX;xpos=Va-r3s-n;feats=Mood=Ind|Number=Sing|Person=3|Polarity=Pos|Tense=Pres|VerbForm=Fin;governor=5;dependency_relation=aux>]>
<Token index=4;words=[<Word index=4;text=bil;lemma=biti;upos=AUX;xpos=Va-p-sm;feats=Gender=Masc|Number=Sing|VerbForm=Part;governor=5;dependency_relation=cop>]>
<Token index=5;words=[<Word index=5;text=rojen;lemma=rojen;upos=ADJ;xpos=Appmsnn;feats=Case=Nom|Definite=Ind|Degree=Pos|Gender=Masc|Number=Sing|VerbForm=Part;governor=0;dependency_relation=root>]>
<Token index=6;words=[<Word index=6;text=v;lemma=v;upos=ADP;xpos=Sl;feats=Case=Loc;governor=7;dependency_relation=case>]>
<Token index=7;words=[<Word index=7;text=Vrbi;lemma=Vrba;upos=PROPN;xpos=Npfsl;feats=Case=Loc|Gender=Fem|Number=Sing;governor=5;dependency_relation=obl>]>
<Token index=8;words=[<Word index=8;text=.;lemma=.;upos=PUNCT;xpos=Z;feats=_;governor=5;dependency_relation=punct>]>
<Token index=1;words=[<Word index=1;text=Danes;lemma=danes;upos=ADV;xpos=Rgp;feats=Degree=Pos;governor=3;dependency_relation=advmod>]>
<Token index=2;words=[<Word index=2;text=je;lemma=biti;upos=AUX;xpos=Va-r3s-n;feats=Mood=Ind|Number=Sing|Person=3|Polarity=Pos|Tense=Pres|VerbForm=Fin;governor=3;dependency_relation=cop>]>
<Token index=3;words=[<Word index=3;text=poznan;lemma=poznan;upos=ADJ;xpos=Appmsnn;feats=Case=Nom|Definite=Ind|Degree=Pos|Gender=Masc|Number=Sing|VerbForm=Part;governor=0;dependency_relation=root>]>
<Token index=4;words=[<Word index=4;text=kot;lemma=kot;upos=SCONJ;xpos=Cs;feats=_;governor=7;dependency_relation=case>]>
<Token index=5;words=[<Word index=5;text=največji;lemma=velik;upos=ADJ;xpos=Agsmsny;feats=Case=Nom|Definite=Def|Degree=Sup|Gender=Masc|Number=Sing;governor=7;dependency_relation=amod>]>
<Token index=6;words=[<Word index=6;text=slovenski;lemma=slovenski;upos=ADJ;xpos=Agpmsny;feats=Case=Nom|Definite=Def|Degree=Pos|Gender=Masc|Number=Sing;governor=7;dependency_relation=amod>]>
<Token index=7;words=[<Word index=7;text=pesnik;lemma=pesnik;upos=NOUN;xpos=Ncmsn;feats=Case=Nom|Gender=Masc|Number=Sing;governor=3;dependency_relation=obl>]>
<Token index=8;words=[<Word index=8;text=.;lemma=.;upos=PUNCT;xpos=Z;feats=_;governor=3;dependency_relation=punct>]>
<Token index=1;words=[<Word index=1;text=Študiral;lemma=študirati;upos=VERB;xpos=Vmpp-sm;feats=Aspect=Imp|Gender=Masc|Number=Sing|VerbForm=Part;governor=0;dependency_relation=root>]>
<Token index=2;words=[<Word index=2;text=je;lemma=biti;upos=AUX;xpos=Va-r3s-n;feats=Mood=Ind|Number=Sing|Person=3|Polarity=Pos|Tense=Pres|VerbForm=Fin;governor=1;dependency_relation=aux>]>
<Token index=3;words=[<Word index=3;text=na;lemma=na;upos=ADP;xpos=Sl;feats=Case=Loc;governor=4;dependency_relation=case>]>
<Token index=4;words=[<Word index=4;text=Dunaju;lemma=Dunaj;upos=PROPN;xpos=Npmsl;feats=Case=Loc|Gender=Masc|Number=Sing;governor=1;dependency_relation=obl>]>
<Token index=5;words=[<Word index=5;text=.;lemma=.;upos=PUNCT;xpos=Z;feats=_;governor=1;dependency_relation=punct>]>
""".strip()
SL_DOC_WORDS_GOLD = """
<Word index=1;text=France;lemma=France;upos=PROPN;xpos=Npmsn;feats=Case=Nom|Gender=Masc|Number=Sing;governor=5;dependency_relation=nsubj>
<Word index=2;text=Prešeren;lemma=Prešeren;upos=PROPN;xpos=Npmsn;feats=Case=Nom|Gender=Masc|Number=Sing;governor=1;dependency_relation=flat_name>
<Word index=3;text=je;lemma=biti;upos=AUX;xpos=Va-r3s-n;feats=Mood=Ind|Number=Sing|Person=3|Polarity=Pos|Tense=Pres|VerbForm=Fin;governor=5;dependency_relation=aux>
<Word index=4;text=bil;lemma=biti;upos=AUX;xpos=Va-p-sm;feats=Gender=Masc|Number=Sing|VerbForm=Part;governor=5;dependency_relation=cop>
<Word index=5;text=rojen;lemma=rojen;upos=ADJ;xpos=Appmsnn;feats=Case=Nom|Definite=Ind|Degree=Pos|Gender=Masc|Number=Sing|VerbForm=Part;governor=0;dependency_relation=root>
<Word index=6;text=v;lemma=v;upos=ADP;xpos=Sl;feats=Case=Loc;governor=7;dependency_relation=case>
<Word index=7;text=Vrbi;lemma=Vrba;upos=PROPN;xpos=Npfsl;feats=Case=Loc|Gender=Fem|Number=Sing;governor=5;dependency_relation=obl>
<Word index=8;text=.;lemma=.;upos=PUNCT;xpos=Z;feats=_;governor=5;dependency_relation=punct>
<Word index=1;text=Danes;lemma=danes;upos=ADV;xpos=Rgp;feats=Degree=Pos;governor=3;dependency_relation=advmod>
<Word index=2;text=je;lemma=biti;upos=AUX;xpos=Va-r3s-n;feats=Mood=Ind|Number=Sing|Person=3|Polarity=Pos|Tense=Pres|VerbForm=Fin;governor=3;dependency_relation=cop>
<Word index=3;text=poznan;lemma=poznan;upos=ADJ;xpos=Appmsnn;feats=Case=Nom|Definite=Ind|Degree=Pos|Gender=Masc|Number=Sing|VerbForm=Part;governor=0;dependency_relation=root>
<Word index=4;text=kot;lemma=kot;upos=SCONJ;xpos=Cs;feats=_;governor=7;dependency_relation=case>
<Word index=5;text=največji;lemma=velik;upos=ADJ;xpos=Agsmsny;feats=Case=Nom|Definite=Def|Degree=Sup|Gender=Masc|Number=Sing;governor=7;dependency_relation=amod>
<Word index=6;text=slovenski;lemma=slovenski;upos=ADJ;xpos=Agpmsny;feats=Case=Nom|Definite=Def|Degree=Pos|Gender=Masc|Number=Sing;governor=7;dependency_relation=amod>
<Word index=7;text=pesnik;lemma=pesnik;upos=NOUN;xpos=Ncmsn;feats=Case=Nom|Gender=Masc|Number=Sing;governor=3;dependency_relation=obl>
<Word index=8;text=.;lemma=.;upos=PUNCT;xpos=Z;feats=_;governor=3;dependency_relation=punct>
<Word index=1;text=Študiral;lemma=študirati;upos=VERB;xpos=Vmpp-sm;feats=Aspect=Imp|Gender=Masc|Number=Sing|VerbForm=Part;governor=0;dependency_relation=root>
<Word index=2;text=je;lemma=biti;upos=AUX;xpos=Va-r3s-n;feats=Mood=Ind|Number=Sing|Person=3|Polarity=Pos|Tense=Pres|VerbForm=Fin;governor=1;dependency_relation=aux>
<Word index=3;text=na;lemma=na;upos=ADP;xpos=Sl;feats=Case=Loc;governor=4;dependency_relation=case>
<Word index=4;text=Dunaju;lemma=Dunaj;upos=PROPN;xpos=Npmsl;feats=Case=Loc|Gender=Masc|Number=Sing;governor=1;dependency_relation=obl>
<Word index=5;text=.;lemma=.;upos=PUNCT;xpos=Z;feats=_;governor=1;dependency_relation=punct>
""".strip()
SL_DOC_DEPENDENCY_PARSES_GOLD = """
('France', '5', 'nsubj')
('Prešeren', '1', 'flat_name')
('je', '5', 'aux')
('bil', '5', 'cop')
('rojen', '0', 'root')
('v', '7', 'case')
('Vrbi', '5', 'obl')
('.', '5', 'punct')
('Danes', '3', 'advmod')
('je', '3', 'cop')
('poznan', '0', 'root')
('kot', '7', 'case')
('največji', '7', 'amod')
('slovenski', '7', 'amod')
('pesnik', '3', 'obl')
('.', '3', 'punct')
('Študiral', '0', 'root')
('je', '1', 'aux')
('na', '4', 'case')
('Dunaju', '1', 'obl')
('.', '1', 'punct')
""".strip()
SL_DOC_CONLLU_GOLD = """
# newpar id = 1
# sent_id = 1.1
# text = France Prešeren je bil rojen v Vrbi.
1 France France PROPN Npmsn Case=Nom|Gender=Masc|Number=Sing 5 nsubj _ NER=B-PER
2 Prešeren Prešeren PROPN Npmsn Case=Nom|Gender=Masc|Number=Sing 1 flat_name _ NER=I-PER
3 je biti AUX Va-r3s-n Mood=Ind|Number=Sing|Person=3|Polarity=Pos|Tense=Pres|VerbForm=Fin 5 aux _ NER=O
4 bil biti AUX Va-p-sm Gender=Masc|Number=Sing|VerbForm=Part 5 cop _ NER=O
5 rojen rojen ADJ Appmsnn Case=Nom|Definite=Ind|Degree=Pos|Gender=Masc|Number=Sing|VerbForm=Part 0 root _ NER=O
6 v v ADP Sl Case=Loc 7 case _ NER=O
7 Vrbi Vrba PROPN Npfsl Case=Loc|Gender=Fem|Number=Sing 5 obl _ NER=B-LOC|SpaceAfter=No
8 . . PUNCT Z _ 5 punct _ NER=O
# sent_id = 1.2
# text = Danes je poznan kot največji slovenski pesnik.
1 Danes danes ADV Rgp Degree=Pos 3 advmod _ NER=O
2 je biti AUX Va-r3s-n Mood=Ind|Number=Sing|Person=3|Polarity=Pos|Tense=Pres|VerbForm=Fin 3 cop _ NER=O
3 poznan poznan ADJ Appmsnn Case=Nom|Definite=Ind|Degree=Pos|Gender=Masc|Number=Sing|VerbForm=Part 0 root _ NER=O
4 kot kot SCONJ Cs _ 7 case _ NER=O
5 največji velik ADJ Agsmsny Case=Nom|Definite=Def|Degree=Sup|Gender=Masc|Number=Sing 7 amod _ NER=O
6 slovenski slovenski ADJ Agpmsny Case=Nom|Definite=Def|Degree=Pos|Gender=Masc|Number=Sing 7 amod _ NER=O
7 pesnik pesnik NOUN Ncmsn Case=Nom|Gender=Masc|Number=Sing 3 obl _ NER=O|SpaceAfter=No
8 . . PUNCT Z _ 3 punct _ NER=O
# sent_id = 1.3
# text = Študiral je na Dunaju.
1 Študiral študirati VERB Vmpp-sm Aspect=Imp|Gender=Masc|Number=Sing|VerbForm=Part 0 root _ NER=O
2 je biti AUX Va-r3s-n Mood=Ind|Number=Sing|Person=3|Polarity=Pos|Tense=Pres|VerbForm=Fin 1 aux _ NER=O
3 na na ADP Sl Case=Loc 4 case _ NER=O
4 Dunaju Dunaj PROPN Npmsl Case=Loc|Gender=Masc|Number=Sing 1 obl _ NER=B-LOC|SpaceAfter=No
5 . . PUNCT Z _ 1 punct _ NER=O
""".lstrip()
@pytest.fixture(scope="module")
def processed_doc():
    """ Document created by running full Slovenian pipeline on a few sentences """
    # scope="module": build the pipeline once per test module, since model
    # loading is expensive; every test below shares this processed document.
    nlp = classla.Pipeline(models_dir=TEST_MODELS_DIR)
    return nlp(SL_DOC)
def test_text(processed_doc):
    """The processed document must retain the raw input text unchanged."""
    assert SL_DOC == processed_doc.text
def test_conllu(processed_doc):
    """CoNLL-U serialization must match the gold standard exactly."""
    actual = processed_doc.conll_file.conll_as_string()
    assert actual == SL_DOC_CONLLU_GOLD
def test_tokens(processed_doc):
    """Per-sentence token dumps, joined by blank lines, must match the gold tokens."""
    rendered = "\n\n".join(sentence.tokens_string() for sentence in processed_doc.sentences)
    assert rendered == SL_DOC_TOKENS_GOLD
def test_words(processed_doc):
    """Per-sentence word dumps, joined by blank lines, must match the gold words."""
    rendered = "\n\n".join(sentence.words_string() for sentence in processed_doc.sentences)
    assert rendered == SL_DOC_WORDS_GOLD
def test_dependency_parse(processed_doc):
    """Dependency triples per sentence must match the gold parses."""
    rendered = "\n\n".join(
        sentence.dependencies_string() for sentence in processed_doc.sentences)
    assert rendered == SL_DOC_DEPENDENCY_PARSES_GOLD
| 63.535948
| 198
| 0.766485
| 1,679
| 9,721
| 4.356164
| 0.094104
| 0.051682
| 0.065628
| 0.082034
| 0.846459
| 0.828958
| 0.803254
| 0.773175
| 0.732021
| 0.715204
| 0
| 0.021192
| 0.063162
| 9,721
| 152
| 199
| 63.953947
| 0.781926
| 0.013065
| 0
| 0.025424
| 0
| 0.483051
| 0.897495
| 0.643215
| 0
| 0
| 0
| 0
| 0.042373
| 1
| 0.050847
| false
| 0
| 0.025424
| 0
| 0.084746
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
255068c30c52fa8d96cfdc3cf3e0ece08793d0d8
| 231
|
py
|
Python
|
nmigen_boards/icesugar.py
|
lethalbit/nmigen-boards
|
aaf18252e457ff95257137da2a629820c0ff2bfa
|
[
"BSD-2-Clause"
] | 11
|
2021-12-10T12:23:29.000Z
|
2022-03-13T08:40:20.000Z
|
nmigen_boards/icesugar.py
|
lethalbit/nmigen-boards
|
aaf18252e457ff95257137da2a629820c0ff2bfa
|
[
"BSD-2-Clause"
] | 12
|
2021-12-11T18:51:29.000Z
|
2022-03-12T05:08:52.000Z
|
nmigen_boards/icesugar.py
|
lethalbit/nmigen-boards
|
aaf18252e457ff95257137da2a629820c0ff2bfa
|
[
"BSD-2-Clause"
] | 7
|
2021-12-12T07:20:21.000Z
|
2022-03-06T06:20:55.000Z
|
# Deprecation shim: re-export everything from amaranth_boards.icesugar under
# the legacy nmigen_boards name, and warn importers to migrate.
from amaranth_boards.icesugar import *
from amaranth_boards.icesugar import __all__  # re-export the canonical __all__ too
import warnings
# stacklevel=2 attributes the warning to the module importing this shim.
warnings.warn("instead of nmigen_boards.icesugar, use amaranth_boards.icesugar",
              DeprecationWarning, stacklevel=2)
| 28.875
| 80
| 0.796537
| 27
| 231
| 6.518519
| 0.555556
| 0.318182
| 0.375
| 0.295455
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005051
| 0.142857
| 231
| 7
| 81
| 33
| 0.883838
| 0
| 0
| 0
| 0
| 0
| 0.272727
| 0.203463
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
25523b047ebdce1ea7197a686505b8949336ea39
| 7,300
|
py
|
Python
|
agent/model.py
|
RevanMacQueen/DRQN
|
7b8a743935679f65817ad4f41d28c2c155e7a62a
|
[
"MIT"
] | null | null | null |
agent/model.py
|
RevanMacQueen/DRQN
|
7b8a743935679f65817ad4f41d28c2c155e7a62a
|
[
"MIT"
] | null | null | null |
agent/model.py
|
RevanMacQueen/DRQN
|
7b8a743935679f65817ad4f41d28c2c155e7a62a
|
[
"MIT"
] | 1
|
2021-03-14T23:31:53.000Z
|
2021-03-14T23:31:53.000Z
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from agent.settings import device
class QNetwork(nn.Module):
    """Feed-forward Q-value network mapping a state to one Q-value per action."""

    def __init__(self, state_size, action_size, seed, num_layers=1, hidden_size=64):
        '''
        Initialize parameters and build model.

        parameters:
            state_size : (int) Dimension of each state
            action_size : (int) Dimension of each action
            seed : (int) Random seed
            num_layers : (int) Total number of linear layers before the output
                layer (num_layers - 1 extra hidden layers are added)
            hidden_size : (int) Width of each hidden layer
        '''
        super(QNetwork, self).__init__()
        self.seed = torch.manual_seed(seed)
        self.state_size = state_size
        self.action_size = action_size
        self.hidden_size = hidden_size

        # First layer projects the raw state into hidden space.
        self.input_layer = nn.Linear(self.state_size, hidden_size)
        # (additional) hidden layers; `_` because the index is irrelevant.
        self.hidden_layers = nn.ModuleList(
            [nn.Linear(self.hidden_size, self.hidden_size)
             for _ in range(num_layers - 1)])
        # Final layer maps hidden features to one Q-value per action.
        self.final = nn.Linear(self.hidden_size, self.action_size)

    def forward(self, state):
        '''
        Returns q-values for a given state.

        parameters:
            state : (np.Array) the state the agent is in
        returns:
            action_values : (np.Array) one Q-value per action
        '''
        x = F.relu(self.input_layer(state))
        # Iterate the layers directly — the enumerate index was never used.
        for layer in self.hidden_layers:
            x = F.relu(layer(x))
        action_values = self.final(x)
        return action_values
class RNNQNetwork(nn.Module):
    '''
    Simple recurrent network with single RNN layer and single linear layer.
    '''

    def __init__(self, input_size, action_size, hidden_state_size, seed, num_layers=1):
        '''
        Initialize parameters and build model.

        parameters:
            input_size : (int) Dimension of each state x number of states in sequence
            action_size : (int) Dimension of each action, also the size of the network output
            hidden_state_size : (int) Dimension of the RNN hidden state
            seed : (int) Random seed
            num_layers : (int) The number of recurrent layers (currently unused)
        '''
        super(RNNQNetwork, self).__init__()
        self.seed = torch.manual_seed(seed)
        self.input_size = input_size
        self.action_size = action_size
        self.hidden_state_size = hidden_state_size
        self.num_layers = num_layers

        # initial layer: project the input into the hidden-state space
        self.initial = nn.Linear(self.input_size, self.hidden_state_size)
        # additional feed-forward hidden layers applied before the RNN
        self.hidden_layers = nn.ModuleList(
            [nn.Linear(self.hidden_state_size, self.hidden_state_size)
             for _ in range(num_layers - 1)])
        self.rnn = nn.RNN(self.hidden_state_size, self.hidden_state_size,
                          batch_first=True, nonlinearity='relu')
        # final layer: map the RNN output to one Q-value per action
        self.final = nn.Linear(self.hidden_state_size, self.action_size)

        # hidden state for prediction, not learning
        self.hidden = self.init_hidden(1)

    def forward(self, x):
        '''
        Forward pass for training.

        parameters:
            x : (torch.tensor)
        '''
        if len(x.shape) < 3:
            # nn.RNN with batch_first=True expects (batch, seq, features).
            x = x.unsqueeze(0)
        x = F.relu(self.initial(x))
        # Iterate the layers directly — the enumerate index was never used.
        for layer in self.hidden_layers:
            x = F.relu(layer(x))
        out, hidden = self.rnn(x)
        action_values = self.final(out)
        return action_values

    def forward_prediction(self, x):
        '''
        Like forward() but uses and updates the hidden state kept in self.hidden.

        parameters:
            x : (torch.tensor)
        '''
        if len(x.shape) < 3:
            x = x.unsqueeze(0)
        x = F.relu(self.initial(x))
        for layer in self.hidden_layers:
            x = F.relu(layer(x))
        out, self.hidden = self.rnn(x, self.hidden)
        action_values = self.final(out)
        return action_values

    def init_hidden(self, batch_size):
        '''
        Initializes the hidden state to be 0.
        '''
        # Shape (num_rnn_layers=1, batch, hidden_state_size) on the configured device.
        hidden = torch.zeros(1, batch_size, self.hidden_state_size).to(device)
        return hidden
class RNNQNetworkZeroState(nn.Module):
    '''
    Simple recurrent network with single RNN layer and single linear layer but
    the hidden state is always 0, making the network essentially a FFN.
    '''

    def __init__(self, input_size, action_size, hidden_state_size, seed, num_layers=1):
        '''
        Initialize parameters and build model.

        parameters:
            input_size : (int) Dimension of each state x number of states in sequence
            action_size : (int) Dimension of each action, also the size of the network output
            hidden_state_size : (int) Dimension of the RNN hidden state
            seed : (int) Random seed
            num_layers : (int) The number of recurrent layers (currently unused)
        '''
        super(RNNQNetworkZeroState, self).__init__()
        self.seed = torch.manual_seed(seed)
        self.input_size = input_size
        self.action_size = action_size
        self.hidden_state_size = hidden_state_size
        self.num_layers = num_layers

        # initial layer: project the input into the hidden-state space
        self.initial = nn.Linear(self.input_size, self.hidden_state_size)
        # additional feed-forward hidden layers applied before the RNN
        self.hidden_layers = nn.ModuleList(
            [nn.Linear(self.hidden_state_size, self.hidden_state_size)
             for _ in range(num_layers - 1)])
        self.rnn = nn.RNN(self.hidden_state_size, self.hidden_state_size,
                          batch_first=True, nonlinearity='relu')
        # final layer: map the RNN output to one Q-value per action
        self.final = nn.Linear(self.hidden_state_size, self.action_size)

        # hidden state for prediction, not learning
        self.hidden = self.init_hidden(1)

    def forward(self, x):
        '''
        Forward pass for training.

        parameters:
            x : (torch.tensor)
        '''
        if len(x.shape) < 3:
            # nn.RNN with batch_first=True expects (batch, seq, features).
            x = x.unsqueeze(0)
        x = F.relu(self.initial(x))
        # Iterate the layers directly — the enumerate index was never used.
        for layer in self.hidden_layers:
            x = F.relu(layer(x))
        out = torch.zeros(x.shape).to(device)
        for i in range(x.shape[1]):
            # Feed each timestep separately with a freshly-zeroed hidden state
            # so no information is carried across timesteps (FFN-like behavior).
            oneStep_in = x[:, i, :].unsqueeze(1)
            oneStep_hidden = self.init_hidden(x.shape[0])
            oneStep_out, hidden = self.rnn(oneStep_in, oneStep_hidden)
            out[:, i, :] = oneStep_out.squeeze(1)
        action_values = self.final(out)
        return action_values

    def forward_prediction(self, x):
        '''
        Like forward() but always feeds a zero hidden state; the returned hidden
        state is stored in self.hidden.

        parameters:
            x : (torch.tensor)
        '''
        if len(x.shape) < 3:
            x = x.unsqueeze(0)
        x = F.relu(self.initial(x))
        for layer in self.hidden_layers:
            x = F.relu(layer(x))
        out, self.hidden = self.rnn(x, self.init_hidden(1))
        action_values = self.final(out)
        return action_values

    def init_hidden(self, batch_size):
        '''
        Initializes the hidden state to be 0.
        '''
        # Shape (num_rnn_layers=1, batch, hidden_state_size) on the configured device.
        hidden = torch.zeros(1, batch_size, self.hidden_state_size).to(device)
        return hidden
| 33.796296
| 118
| 0.590685
| 945
| 7,300
| 4.392593
| 0.115344
| 0.084317
| 0.079499
| 0.073235
| 0.826307
| 0.814262
| 0.783185
| 0.774994
| 0.76584
| 0.745363
| 0
| 0.005608
| 0.316027
| 7,300
| 215
| 119
| 33.953488
| 0.825756
| 0.264384
| 0
| 0.734513
| 0
| 0
| 0.001642
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.088496
| false
| 0
| 0.035398
| 0
| 0.212389
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c2f8e680be7cbff6533a87f6ac9cb029a9f892ee
| 1,113
|
py
|
Python
|
Find a Motif in DNA.py
|
RonitPrasad1/Project_Rosalind-
|
59c59f6820b858f0dc08d62b2629a608d477ddb2
|
[
"MIT"
] | 1
|
2022-03-22T22:33:14.000Z
|
2022-03-22T22:33:14.000Z
|
Find a Motif in DNA.py
|
RonitPrasad1/Project_Rosalind-
|
59c59f6820b858f0dc08d62b2629a608d477ddb2
|
[
"MIT"
] | null | null | null |
Find a Motif in DNA.py
|
RonitPrasad1/Project_Rosalind-
|
59c59f6820b858f0dc08d62b2629a608d477ddb2
|
[
"MIT"
] | null | null | null |
#Find a Motif in DNA:
string = "AACTATGCAACTATGAAACTATGAACTATGATTCCAACTATGTAACTATGATGCATTAAACTATGAAACTATGAACTATGAACTATGAACTATGAAACTATGCGGAACTATGAACTATGGGGACAACTATGGAACTATGAGAACTATGTCAATTAACTATGCGTAACTATGGTCGAAACTATGAACTATGGAACTATGGCAACTATGCAAACTATGAAACTATGTAACTATGTGAAGGACGCACTAACTATGAGAAACTATGAACTATGAACTATGAACTATGCACGCGTTGTAACTATGATGACGATGAACTATGTATTAACTATGACCGAACTATGTCAACTATGTTTTAACTATGAAACTATGTAACTATGTGGTCAACTATGGCATTCCAACTATGGAACTATGAACTATGGTACCTAACTATGATCTGAAACTATGAACTATGGCAACTATGAACTATGGAAGAATCGCGGCTACCTTTCTCGAACTATGGAATTTAACTATGGAACTATGCTGAAACTATGAACCAAACTATGTAAACTATGAACTATGAAACTATGGCTAGAACTATGATGTAACTATGAAACTATGAACTATGAACTATGAACTATGGAACTATGGTTGATAACTATGCAACGGTACGATGGTCGTCAACTATGAACTATGGAAACTATGGAACTATGAACTATGTGTCAACTATGAACTATGTGGAACTATGCCTAAACTATGCTTCCTCTCACGTGTACGAAACTATGCGAAACTATGAACTATGATGCTCAATGAACTATGCAACTATGCTAACTATGTCTTTGTTCAACTATGACAACTATGTGGGCAACTATGAACTATGGGAACTATGAACTATGAACTATGTCATATATGAAGTAACTATGAACTATGAACTATGCAACTATGGAACTATGGAACTATGAAACTATGAACTATGGCAACTATGAAGCGAACTATGCAGAAACTATG"
# Print the 1-based start position of every (possibly overlapping) occurrence
# of the motif "AACTATGAA" in the DNA string above.
for i in range(len(string)):
    # startswith(prefix, i) tests at offset i in place, avoiding the O(n)
    # tail-slice copy that string[i:].startswith(...) made on every iteration.
    if string.startswith("AACTATGAA", i):
        print(i + 1)
| 185.5
| 993
| 0.949686
| 21
| 1,113
| 50.333333
| 0.761905
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.000927
| 0.030548
| 1,113
| 6
| 994
| 185.5
| 0.978684
| 0.017969
| 0
| 0
| 0
| 0
| 0.909926
| 0.901654
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6c66779aec6e2f8953f3fb0e7a31dc4654711b78
| 19,736
|
py
|
Python
|
vela/vela/workers_api.py
|
go-vela/sdk-python
|
ca4425995bee43cb517e78fcd6702fec6f758222
|
[
"Apache-2.0"
] | 1
|
2020-11-18T13:31:05.000Z
|
2020-11-18T13:31:05.000Z
|
vela/vela/workers_api.py
|
go-vela/sdk-python
|
304b2c8645dc6332fd69398c8c849a3961619c29
|
[
"Apache-2.0"
] | 57
|
2020-04-30T19:02:47.000Z
|
2022-03-28T07:39:58.000Z
|
vela/vela/workers_api.py
|
go-vela/sdk-python
|
304b2c8645dc6332fd69398c8c849a3961619c29
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
#
# Copyright (c) 2021 Target Brands, Inc. All rights reserved.
"""
Vela server
API for the Vela server # noqa: E501
OpenAPI spec version: 0.6.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from vela.api_client import ApiClient
class WorkersApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    # Fall back to a default-configured ApiClient when the caller supplies none.
    if api_client is None:
        api_client = ApiClient()
    self.api_client = api_client
def create_worker(self, body, **kwargs):  # noqa: E501
    """create_worker  # noqa: E501

    Create a worker for the configured backend  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_worker(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param Worker body: Payload containing the worker to create (required)
    :return: Worker
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: always return only the response data, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Async path: hand back the request thread immediately.
        return self.create_worker_with_http_info(body, **kwargs)  # noqa: E501
    else:
        (data) = self.create_worker_with_http_info(body, **kwargs)  # noqa: E501
        return data
def create_worker_with_http_info(self, body, **kwargs):  # noqa: E501
    """create_worker  # noqa: E501

    Create a worker for the configured backend  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_worker_with_http_info(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param Worker body: Payload containing the worker to create (required)
    :return: Worker
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of accepted keyword arguments; anything else is a caller error.
    all_params = ['body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject unknown kwargs explicitly rather than silently ignoring them.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_worker" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `create_worker`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    # Delegate the actual HTTP POST to the shared ApiClient.
    return self.api_client.call_api(
        '/api/v1/workers', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Worker',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_worker(self, worker, **kwargs):  # noqa: E501
    """delete_worker  # noqa: E501

    Delete a worker for the configured backend  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.delete_worker(worker, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str worker: Name of the worker (required)
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always strips the (data, status, headers)
    # tuple down to just the deserialized payload.
    kwargs['_return_http_data_only'] = True
    # The delegate already returns the right thing in both modes: a
    # request thread when async_req is set, otherwise the payload itself,
    # so its result is handed back unchanged.
    return self.delete_worker_with_http_info(worker, **kwargs)  # noqa: E501
def delete_worker_with_http_info(self, worker, **kwargs):  # noqa: E501
    """delete_worker  # noqa: E501

    Delete a worker for the configured backend  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.delete_worker_with_http_info(worker, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str worker: Name of the worker (required)
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['worker']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of the local namespace (self, worker, kwargs); validated
    # kwargs are merged into it and read back via params.get(...) below.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            # Fail fast on typos / unsupported options rather than
            # silently dropping them.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_worker" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'worker' is set
    if ('worker' not in params or
            params['worker'] is None):
        raise ValueError("Missing the required parameter `worker` when calling `delete_worker`")  # noqa: E501

    collection_formats = {}

    # The worker name fills the {worker} segment of the request path.
    path_params = {}
    if 'worker' in params:
        path_params['worker'] = params['worker']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    # Delegate the actual HTTP round trip to the shared ApiClient.
    return self.api_client.call_api(
        '/api/v1/workers/{worker}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_worker(self, worker, **kwargs):  # noqa: E501
    """get_worker  # noqa: E501

    Retrieve a worker for the configured backend  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_worker(worker, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str worker: Hostname of the worker (required)
    :return: Worker
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always ask the delegate for just the payload, not the full
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # In async mode this is a request thread, in sync mode the Worker
    # itself — either way it is returned as-is.
    response = self.get_worker_with_http_info(worker, **kwargs)  # noqa: E501
    return response
def get_worker_with_http_info(self, worker, **kwargs):  # noqa: E501
    """get_worker  # noqa: E501

    Retrieve a worker for the configured backend  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_worker_with_http_info(worker, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str worker: Hostname of the worker (required)
    :return: Worker
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['worker']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of the local namespace (self, worker, kwargs); validated
    # kwargs are merged into it and read back via params.get(...) below.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            # Fail fast on typos / unsupported options rather than
            # silently dropping them.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_worker" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'worker' is set
    if ('worker' not in params or
            params['worker'] is None):
        raise ValueError("Missing the required parameter `worker` when calling `get_worker`")  # noqa: E501

    collection_formats = {}

    # The worker hostname fills the {worker} segment of the request path.
    path_params = {}
    if 'worker' in params:
        path_params['worker'] = params['worker']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    # Delegate the actual HTTP round trip to the shared ApiClient.
    return self.api_client.call_api(
        '/api/v1/workers/{worker}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Worker',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_workers(self, authorization, **kwargs):  # noqa: E501
    """get_workers  # noqa: E501

    Retrieve a list of workers for the configured backend  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_workers(authorization, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str authorization: Vela bearer token (required)
    :return: list[Worker]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always ask the delegate for just the payload, not the full
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate returns a request thread when async_req is truthy and
    # the worker list otherwise; both are passed straight through.
    return self.get_workers_with_http_info(authorization, **kwargs)  # noqa: E501
def get_workers_with_http_info(self, authorization, **kwargs):  # noqa: E501
    """get_workers  # noqa: E501

    Retrieve a list of workers for the configured backend  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_workers_with_http_info(authorization, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str authorization: Vela bearer token (required)
    :return: list[Worker]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['authorization']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of the local namespace (self, authorization, kwargs);
    # validated kwargs are merged in and read back via params.get(...).
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            # Fail fast on typos / unsupported options rather than
            # silently dropping them.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_workers" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'authorization' is set
    if ('authorization' not in params or
            params['authorization'] is None):
        raise ValueError("Missing the required parameter `authorization` when calling `get_workers`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    # Unlike the other worker endpoints, the token is sent explicitly in
    # the Authorization header (auth_settings is empty below).
    header_params = {}
    if 'authorization' in params:
        header_params['Authorization'] = params['authorization']  # noqa: E501

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # Delegate the actual HTTP round trip to the shared ApiClient.
    return self.api_client.call_api(
        '/api/v1/workers', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[Worker]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_worker(self, body, worker, **kwargs):  # noqa: E501
    """update_worker  # noqa: E501

    Update a worker for the configured backend  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.update_worker(body, worker, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param Worker body: Payload containing the worker to update (required)
    :param str worker: Name of the worker (required)
    :return: Worker
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always ask the delegate for just the payload, not the full
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Async callers get the request thread, sync callers the updated
    # Worker; the delegate's return value covers both cases.
    response = self.update_worker_with_http_info(body, worker, **kwargs)  # noqa: E501
    return response
def update_worker_with_http_info(self, body, worker, **kwargs):  # noqa: E501
    """update_worker  # noqa: E501

    Update a worker for the configured backend  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.update_worker_with_http_info(body, worker, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param Worker body: Payload containing the worker to update (required)
    :param str worker: Name of the worker (required)
    :return: Worker
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['body', 'worker']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot of the local namespace (self, body, worker, kwargs);
    # validated kwargs are merged in and read back via params.get(...).
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            # Fail fast on typos / unsupported options rather than
            # silently dropping them.
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_worker" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `update_worker`")  # noqa: E501
    # verify the required parameter 'worker' is set
    if ('worker' not in params or
            params['worker'] is None):
        raise ValueError("Missing the required parameter `worker` when calling `update_worker`")  # noqa: E501

    collection_formats = {}

    # The worker name fills the {worker} segment of the request path.
    path_params = {}
    if 'worker' in params:
        path_params['worker'] = params['worker']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The Worker payload is serialized as the JSON request body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    # Delegate the actual HTTP round trip to the shared ApiClient.
    return self.api_client.call_api(
        '/api/v1/workers/{worker}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Worker',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 37.449715
| 119
| 0.601135
| 2,249
| 19,736
| 5.052023
| 0.075145
| 0.052808
| 0.02834
| 0.031685
| 0.937511
| 0.932582
| 0.922109
| 0.909611
| 0.905386
| 0.902218
| 0
| 0.017801
| 0.30832
| 19,736
| 526
| 120
| 37.520913
| 0.814519
| 0.330665
| 0
| 0.784173
| 1
| 0
| 0.17697
| 0.033136
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039568
| false
| 0
| 0.014388
| 0
| 0.111511
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
66570123503be41769c86a8b9649bdfbfe2ea7c6
| 5,658
|
py
|
Python
|
sms.py
|
toxic-soul/toxicsoul
|
b24b93d69862baebd4f75bbfc15d3afab3a81de8
|
[
"MIT"
] | null | null | null |
sms.py
|
toxic-soul/toxicsoul
|
b24b93d69862baebd4f75bbfc15d3afab3a81de8
|
[
"MIT"
] | null | null | null |
sms.py
|
toxic-soul/toxicsoul
|
b24b93d69862baebd4f75bbfc15d3afab3a81de8
|
[
"MIT"
] | null | null | null |
#Compiled By Mohammad Alamin
#https://github.com/AK27HBD/
import marshal
exec(marshal.loads(b'\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x1a\x00\x00\x00d\x00d\x01l\x00Z\x00e\x01e\x00\xa0\x02d\x02\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00Ns\xec\x06\x00\x00x\x9c\xa5W\xcb\xb6\xa2\xc8\x12\x9d\xf7W\xd4Z=\xe8\xeeI_\x1ez\xba\x18\xd4@P0QP@\x93\xc7\x8c\x04/\xa4&\xc8-\x15\x84\xaf\xbfA\xe2\xe3\x9cSU]\xd5\xd5\x03\x96\xac$%b\xef\xd8\xb13\xf8U;\x16\x15e\xbb\xf4\x83u\xcc\xe3\xa2\x88\xd3\x0f\x13\x16\x17\xb4\xfc\x85\x16\xd5\xf1\xf3\xf9\x03\x89O\xbb\x97\xd1/\xbb\xeb.\xf9}\xb8\xff\x93\xbc\x8c\xd2]rLw\xbf\x93\xdfPk\xed\xfb\xcb\xf3\xc5\x86\xc8\xa6\x10\xfb\xe3\x12\xcd\xc4:\x9a\xe3S\x14X\x8b\xf8\xb6\x8e\xe6&K\x02\xcc\x12\xd9\xed\xb4"j\x89$dI\x81\xaf\xa9\x8f\xbbtn]`\xbdM}[H\x03\xd8\xd7\xaa\x15\t\xd4:)\x9d\xcc\x91\xf4.\xda\xb2K"a\xb8\x98\x10\x07\x11s\r\xb6O\'U\x9d\xb4\xe3n\x17\xd8B\xe4\x0b\xd9B3\x9b]\xe0\x1e\x894\xca\x88\xa1\x94\xa4\x1d7;\x0fUZ\xa9\xb6\x90\x93\xb0\xd0\x10]8G\x9e+\x12R\x96\xce\xdd\x8a\x14I\xe6\xf9\xe3&\r\xdc\x0e\xcd\x86\x9cp`\xb3$\xab.\xa9/\xd2(@\xd9\xdaS!?t\x8c\xf9>\xe7\x88X\xd3X\x9dM\xacN?\x03\xb4\xdb}z^\x8b\xcf\xf5\xc0S\x13kju~g\xb5d\x8bWx\x86=\xa4G\xa6#\xba\xe6\xc6SWx+j.F\xd9\x8a>\xf6\xc9\x04r]8UN|E$\x80\x1b\xe2V\xf0\xfb\xef\xe3\xce\xc6x\xb35\xf5-\x9d\xbc 
\xfd\xf9?D\x0f\x95V\xb8\x8c\x18\xfa\x18M\x85\x8csd\x00\xcfs\x0c\\\x99\xf7\xf7\\\t>?s\xdc(\xaf\xd6\x85,\x98N:K<uV!\xce\xdc\xedU\xf5\xbd\xaf\xc4\xa0\x83>Pk\xe3\xa4h\xaeZ\x89[\xa2M\x14\xa4\x99G\xa8A\x93t\xc7z)\xe9M\xec\x8d\xae\x96!\x02f,D\x81\xbd\x8f\r\xa5&\xdax\x0f5\xa9\x89!vK_\x17S#;\x83\x0e\xda\xb4`\xfb\xc8S\xf2\xc48\xd4\xe9\xde\xa9\xc3\x00\x0b\xb1\xa6\x88\xfd37\xc8\xab^c(\xab\x8e\x91\xaf\x1f\xa2\xc0\xecz|\xdf\xd4\x91\x96\x01\x0f9\x0b}\x97%\xa5M\xd0\xc1\xaeI\t\xbc\x94\xce\x19\xcfY\x13yf\xda\xff\x1f\x15z\x93\x18\xd7*\x94t\xd0\xb8rYJU\x07Z\xa3\xc0a\x9e\x1a3\xa8\xd7\xa4\xdc\xb5&\xe4;\x16B\xd01\xda\x1f3D\x93\xcf\x80\xe7\x1c\x16\xb8]\xb4\t]j\xa6\xb0\x0bT\xc6\x9f\x95*hu\xccP)\x94Zv\xd3e\xeb\xecA\xff\'+\xabD\xe0\xaa\x1d\xe2\xe6B:W\xbb\x15\xfdX\xa7\x12\xa6\xc0\x03p5\xa6}\x9fE\xf0\x9b\x06\xd5\xcb\xc0\x9f"YT\xa9\xd3\xf9\xa4\xee1F\x1aZ\xc4\x06\xce#\x03\xb7I\xc7u<\rA\xdb\xa0\xf7.\x82+\x0e\xdc*-\xf0,\x86\\\x17\xda\xe1\xd5^\xf3g9\xe0\xf9\x9a\xf2\x89&F^\x93bKWtB\x17]62\xdb\xf3\xbd\x9f>\x9b\xad\xa9\x98\xcdqq\xf7\r$\x83\x1e\xa6\xd6\x82\xe3n\xdf\xe9BN\xe5\xb4\xed\xb1*]()M\xe4_!\xe7\xed\x05\xee\xcfK\xc0\xb8\x94\xaeu$\xb1\xcbR\xb6\x19)\x9c3\x91\xddf-\x0f\xbc\xae7\xd9\x08\xd1g\\D\xa3:\x01|a\xe0\xd6\xc9\x1e\xd64\xb1\xe7\x8a>\xb9\xb78\xf7\xf6\x84s/\xbc\xe7>\x0c\xd4jY\x98\x80+\xad\x97\x01\xf0\x03}\xd2s\x00\x1e\xc45z\xd3 
\xd7d\xef?\xb1?\xba\xd5\xe1\x16\xe7\xa11K\xf8q=Z\x82\xdf\x9a\xd3^SP3a\x89\xddqbli\xe0q\x9e\xf2d\xae\x9e\xa0~\xc0=\xab\tU\xfe\x97H\xca\x05t\x7f\x00\x8c\xb9\xcd{,\xfd\x0b\x15\xc0[`\xc2\x1e\xa5\xe5\xba+\xf4\x13\xd1\xd0\t\xfe\x0f\xbd\xc4\xba\xc8\x18\xf1:\xad\xa6Y\xb9\x90\xc6"\xf1\xc1\x7f\xe9\xa9|\xab\xcb\x1b7\x0e\xe7F\xfc\x92\x9b\x1e\xfb\xf8\x10\x1b\xfa9\xf4\xcfy"\xe55\xe4\xd6\xfb\xeb\xadV\\\x9f\xefy\x10\xff\x01\x0f\xe2O\xf3\xe0\xddx\xb8\xf5\xdb\xe6\xd6\x8fC\x7f\x9eF+\xedm\x8f\xfe\xd7K\x16_\xe0\x1e\xfaQz\x8f;*\xa1\xbe\x06\xee\xe2\xc1\xa7.a\xe1\x0c:\x91\xb8W\x816\xe19\xe7\x1e\xaf\xb1>\xf9\xcf\xc3\xbb\xb0\xf29\n\x0e\xcak\x8f\xb2)\xe49\xb3\x81;l\x92\x12\xf4\\\xda\x15\xe0\x91\xa2\xad[\x85\xb2s\x84\xb3\xe1\xb1\xf7\xa7\xb9\xa0wM\x88\xd0\xff\xf6\x81P\xf4\x824\xf4\xfd\xba7\x1c\xbf\xfc\xd5\x9e(M\x16\xcd\xb3\xa1\xce\x85yX\xca\xe1u)\xe3\x0ez\x0e\xf4\xcfJ\xe0\xb6y\x83\xb3\xfdq\x9c\xb6x\xa2\x0e\xe4\x0f\xfc]RM\xdcr\xefdB\xef\xb5\x14b7\x04t\x02\xb8*\xf0\xa0:.\xa1\')ZD\x86.\x84\x9b\xe4\xee\xc7\x97\xd0\x17\xd9\xd7\xbdxr\xf3)\xbc\xbay\x04\xef\x83\xb7^\xc5\xfb\x04\xb0\x98\x12pz\xf3m\x8ew\xd0\xc9\xe3l\x1bxZ\r\xde1\xfa\xc2\xb7\xc1\xc7\x96\xa5}L\xa1\x0fH\xf1q\xe0JLYx\xd0\x1d\xcfS\xa6\xdb\x83\xa8\x86r\xef\xc77_\x15\xa3\x1c\xb0\xf55ck\xc1>\x86\xfe\xf8\x12\xf9\x8d\x82\xdb\x08zH\xed\xf7\xb7Q\xa0\x8b1\xccJ\xd1T,B\xff\xdaE\x1e\x7f\xc6\x1cA\xb1\x87\xb9\x8a\xb5\x91\xef(\xe9\xdc\x14\xe1YG\x02\xbb#\x85\xbb\x07OWV\x1a\xdf\xab\xf5\xe7A4\x1b<t\xedC\xcc\xb9\xcd\xcc\xc3\xb5\xde\xf9\xfa)\x9d3\')\x14\xc0\r\xfd\xb7\x17\x8a\x8d\xaf\x80\x0f_\xd9\xe6\xf0Qy\xe7\xab{"C}\xe6\xe6x;\xf0\t\xda\x84\xbc\xbd\xbe\xb7"\x03f\xbfs\x188J<w\xcf\xe0;\x0f\xce^\xcd~\x99I\xc3~\x16\xec\x12C\xe9}\xa0\x9f\x0f\xeb\x84\xaa\x15\x82y\x04\xe6\xc6\x1c<\x97!-\xbf\xcfF\xd5*\x83:\xcc\xefs\x9d\xf9\x9cy\xb0z\x02\xed\x00\x17j\x10\xfaL@3&\x98\xb2\x95m\xef\xb1\xe0}Q;\x19!\x03\xfc(\xb0.K:\xea\xaf\x1c\xe1\xeb\x05f\x95\x05\xd2T\xe0\xd5n\xfa\xfa\xbd\x99M\r\xa5K\xb5\xbc\xaf\xed\t\x19\x0fOR\x1egf\xdbd\x83\xeeD\xdeg0gB~\xa0K\x7f\x0bx\xae,\n&G\xd8w\n\x83~
\x1e\x9a\x0cX\xa7\xfcL\xe6u\x84\x98\x02\x9c#0\'\xc3Y<\xef\xcfC\xf4\xf58]\xbf\xce\xcfZe\x88\x87\xf8\xfb\x00\xd7\x19f\x81\x8e\x180_k\xf9\x01\xce\xc9|\xe7\xf5x&\x0fL\xfcl}5\x83/\x81\xd3\x1b\xa6\xee{\xf9>sv\xfe&g\xe7\x1b9;\xefrv~8g\xee\xdd\xafs.\xe1\x9b\x00<\x82{\x92\xd7d\xcf\x18\xe2\xd33\xbc\xb7\xb5\xb0\xf9;U\xe8+\x91A\x0f\x82>p\xb30\x86\xb9\x97\xe3\x9e\xf4\xd8!\x86<\x91\xfe\xa6\xee\xd2RS\x1f1\xd6\xfe\x03g\xbf\xce\xe3\xac}\x8eQ\xfa\x0e\x97Ol\xef\xeb\xf1\x1a[\xfb\rl\xeftf\xb7?\x8cm\xf4\x1e\x1b\x9cM\xb7\xbae\xdf\xa9\x87\xda$\x05\x03\xff\xcd\xe8c\xde\xf7E\xf2j\xf6_{\xaf\xbfA\xf0\xfd\xde<#\x1d\xbe;0\xd6\xb7\xa2\xbbA\xba\xado\x0eN\xb6\x15\xf1\xd4\x11\xe0\x93\xa4\xbd\xf5\x1ex\xf9\x90\xeb\x9b\xef\xb9#\x9c\xd7\xe3\xb4\xf7\x13\xaa\nD\xce\xe1l\xb0\xeb\xd4o\xe0\xfb\xe4\x00\xfb?}\xfa\xed\x8f?\xfe\x0fM\x8c\x14\x9f)\x03\xda\x04zlib\xda\x04exec\xda\ndecompress\xa9\x00r\x05\x00\x00\x00r\x05\x00\x00\x00\xfa\x17AK27HBD\nMohammad Alamin\xda\x08<module>\x01\x00\x00\x00s\x04\x00\x00\x00\x08\x01\x12\x01'))
| 1,414.5
| 5,585
| 0.736303
| 1,298
| 5,658
| 3.20339
| 0.365948
| 0.040404
| 0.04329
| 0.04329
| 0.015152
| 0.011544
| 0.011544
| 0.011544
| 0.011544
| 0.011544
| 0
| 0.229285
| 0.001767
| 5,658
| 4
| 5,585
| 1,414.5
| 0.506905
| 0.009544
| 0
| 0
| 0
| 1.5
| 0.529359
| 0.529181
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
b02584ef0405720901b3c4ed3b91381b1599c355
| 10,738
|
py
|
Python
|
test/remote_test.py
|
GMOD/python-apollo3
|
c1c47e985d95c8995374f6daa5c2e52b6d94ee0d
|
[
"MIT"
] | 5
|
2017-06-27T19:41:57.000Z
|
2021-06-05T13:36:11.000Z
|
test/remote_test.py
|
galaxy-genome-annotation/python-apollo
|
1257e050ee3fc0a7f7ab8a8c780aefee5c8143f8
|
[
"MIT"
] | 28
|
2017-07-24T15:10:37.000Z
|
2021-09-03T11:56:35.000Z
|
test/remote_test.py
|
MoffMade/python-apollo
|
3cc61458cf5c20bd44fde656b8364417b915cfb8
|
[
"MIT"
] | 10
|
2017-05-10T19:13:44.000Z
|
2021-08-09T04:52:33.000Z
|
import glob
import json
import tarfile
import tempfile
import time
from . import ApolloTestCase, wa
class RemoteTest(ApolloTestCase):
    """Integration tests for the ``wa.remote`` organism endpoints.

    Every test works against a throwaway organism named ``temp_org`` that
    :meth:`setUp` creates from an uploaded ``.tar.gz`` archive and
    :meth:`tearDown` removes again.

    The archive build / update / assertion sequences were previously
    copy-pasted into every test; they are factored into the private
    helpers below so each test states only what is specific to it.
    """

    @staticmethod
    def _build_archive(archive, data_dir):
        """Pack the paths matched by *data_dir* into *archive* as a gzip tar.

        Entry names are rebased so that *data_dir* becomes ``./`` inside
        the archive, matching what Apollo expects as the organism root.
        """
        with tarfile.open(archive.name, mode="w:gz") as tar:
            # glob on the directory pattern yields the directory itself;
            # tar.add then archives it recursively.
            for path in glob.glob(data_dir):
                tar.add(path, arcname=path.replace(data_dir, './'))

    def _remote_update(self, org_id, data_dir, **kwargs):
        """Upload *data_dir* as a fresh archive for *org_id*.

        Uses the canonical update payload shared by all update tests
        (species/genus/public/metadata); extra kwargs (e.g.
        ``no_reload_sequences=True``) are forwarded to
        ``wa.remote.update_organism``.
        """
        with tempfile.NamedTemporaryFile(suffix='.tar.gz') as archive:
            self._build_archive(archive, data_dir)
            # The call happens inside the with-block: the temp file must
            # still exist while the client streams it.
            wa.remote.update_organism(org_id, archive, species='updatedspecies', genus='updatedgenus', public=False, metadata={"bla": "bli"}, **kwargs)
        # Give the Apollo server a moment to process the upload before
        # the caller starts asserting on it.
        time.sleep(3)

    def _check_updated_org(self, expected_seq_count):
        """Assert temp_org reflects the canonical update payload.

        Returns the organism info dict for further checks.
        """
        org_info = wa.organisms.show_organism('temp_org')
        assert org_info['species'] == 'updatedspecies'
        assert org_info['genus'] == 'updatedgenus'
        assert org_info['sequences'] == expected_seq_count
        assert not org_info['publicMode']
        meta_back = json.loads(org_info['metadata'])
        assert 'bla' in meta_back and meta_back['bla'] == 'bli'
        return org_info

    def _check_sequences(self, org_info, expected):
        """Assert the organism's sequences match the (name, length) pairs, in order."""
        seqs = wa.organisms.get_sequences(org_info['id'])['sequences']
        assert len(seqs) == len(expected)
        for seq, (name, length) in zip(seqs, expected):
            assert seq['name'] == name
            assert seq['length'] == length

    def test_initial(self):
        org_info = self.waitOrgCreated('temp_org')
        assert org_info['commonName'] == 'temp_org'

    def test_delete_organism(self):
        org_info = self.waitOrgCreated('temp_org')

        wa.remote.delete_organism(org_info['id'])
        self.waitOrgDeleted('temp_org')

        orgs = wa.organisms.get_organisms()
        for org in orgs:
            assert org['commonName'] != 'temp_org'

    def test_delete_organism_cn(self):
        # Same as test_delete_organism, but deletes by common name
        # instead of numeric id.
        wa.remote.delete_organism('temp_org')
        self.waitOrgDeleted('temp_org')

        orgs = wa.organisms.get_organisms()
        for org in orgs:
            assert org['commonName'] != 'temp_org'

    def test_update_organism(self):
        org_info = self.waitOrgCreated('temp_org')
        assert org_info['sequences'] == 1

        self._remote_update(org_info['id'], 'test-data/dataset_1_files/data/')

        org_info = self._check_updated_org(1)
        self._check_sequences(org_info, [('Merlin', 172788)])

    def test_update_organism_noreload(self):
        org_info = self.waitOrgCreated('temp_org')
        assert org_info['sequences'] == 1

        self._remote_update(org_info['id'], 'test-data/dataset_1_files/data/', no_reload_sequences=True)

        org_info = self._check_updated_org(1)
        self._check_sequences(org_info, [('Merlin', 172788)])

    def test_update_organism_newseq(self):
        org_info = wa.organisms.show_organism('temp_org')
        assert org_info['sequences'] == 1

        # dataset_2 adds a second sequence next to Merlin.
        self._remote_update(org_info['id'], 'test-data/dataset_2_files/data/')

        org_info = self._check_updated_org(2)
        self._check_sequences(org_info, [('Anotherseq', 4730), ('Merlin', 172788)])

    def test_update_organism_changedseq(self):
        org_info = wa.organisms.show_organism('temp_org')
        assert org_info['sequences'] == 1

        # dataset_3 ships a modified Merlin (172188 bp, not 172788).
        self._remote_update(org_info['id'], 'test-data/dataset_3_files/data/')

        org_info = self._check_updated_org(2)
        self._check_sequences(org_info, [('Anotherseq', 4730), ('Merlin', 172188)])

    def test_update_organism_newseq_noreload(self):
        org_info = wa.organisms.show_organism('temp_org')
        assert org_info['sequences'] == 1

        self._remote_update(org_info['id'], 'test-data/dataset_2_files/data/', no_reload_sequences=True)

        # With no_reload_sequences the sequence list is not refreshed,
        # so the new sequence from dataset_2 must NOT show up.
        org_info = self._check_updated_org(1)
        self._check_sequences(org_info, [('Merlin', 172788)])

    def test_update_organism_changedseq_noreload(self):
        org_info = wa.organisms.show_organism('temp_org')
        assert org_info['sequences'] == 1

        self._remote_update(org_info['id'], 'test-data/dataset_3_files/data/', no_reload_sequences=True)

        # Sequences were not reloaded: the original Merlin length remains.
        org_info = self._check_updated_org(1)
        self._check_sequences(org_info, [('Merlin', 172788)])

    def test_add_organism(self):
        meta = {"bla": "bli"}
        with tempfile.NamedTemporaryFile(suffix='.tar.gz') as archive:
            self._build_archive(archive, 'test-data/dataset_1_files/data/')
            res = wa.remote.add_organism('some_new_org_remote', archive, species='newspecies', genus='newgenus', metadata=meta)

        # add_organism returns a list; the new organism is its first entry.
        res = res[0]

        assert res['species'] == 'newspecies'
        assert res['genus'] == 'newgenus'
        assert not res['publicMode']
        meta_back = json.loads(res['metadata'])
        assert 'bla' in meta_back and meta_back['bla'] == 'bli'

        org_info = self.waitOrgCreated('some_new_org_remote')

        # Clean up before the remaining assertions so a failure below
        # does not leave the extra organism behind.
        wa.remote.delete_organism(org_info['id'])
        self.waitOrgDeleted('some_new_org_remote')

        assert org_info['species'] == 'newspecies'
        assert org_info['genus'] == 'newgenus'
        assert org_info['sequences'] == 1
        meta_back = json.loads(org_info['metadata'])
        assert 'bla' in meta_back and meta_back['bla'] == 'bli'

    def setUp(self):
        # Make sure the organism is not already there
        temp_org_info = wa.organisms.show_organism('temp_org')
        if 'directory' in temp_org_info:
            wa.organisms.delete_organism(temp_org_info['id'])
            self.waitOrgDeleted('temp_org')

        with tempfile.NamedTemporaryFile(suffix='.tar.gz') as archive:
            self._build_archive(archive, 'test-data/dataset_1_files/data/')
            wa.remote.add_organism('temp_org', archive)

        self.waitOrgCreated('temp_org')

    def tearDown(self):
        # Remove both the per-test organism and the one test_add_organism
        # may have created, if they still exist.
        for org_name in ('temp_org', 'some_new_org_remote'):
            org_info = wa.organisms.show_organism(org_name)
            if org_info and 'id' in org_info:
                wa.organisms.delete_organism(org_info['id'])
                self.waitOrgDeleted(org_name)
| 37.027586
| 165
| 0.617433
| 1,336
| 10,738
| 4.776198
| 0.077096
| 0.088858
| 0.057044
| 0.045134
| 0.913807
| 0.896881
| 0.883561
| 0.871337
| 0.863188
| 0.843441
| 0
| 0.01153
| 0.240734
| 10,738
| 289
| 166
| 37.155709
| 0.771127
| 0.004004
| 0
| 0.816832
| 0
| 0
| 0.186758
| 0.046385
| 0
| 0
| 0
| 0
| 0.341584
| 1
| 0.059406
| false
| 0
| 0.029703
| 0
| 0.094059
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b05353fc0f18a66fb46ed04c30dd8e55fdbdcce4
| 61,482
|
py
|
Python
|
sdk/python/pulumi_vault/cert_auth_backend_role.py
|
pulumi/pulumi-vault
|
1682875f4a5d7d508f36e166529ad2b8aec34090
|
[
"ECL-2.0",
"Apache-2.0"
] | 10
|
2019-10-07T17:44:18.000Z
|
2022-03-30T20:46:33.000Z
|
sdk/python/pulumi_vault/cert_auth_backend_role.py
|
pulumi/pulumi-vault
|
1682875f4a5d7d508f36e166529ad2b8aec34090
|
[
"ECL-2.0",
"Apache-2.0"
] | 79
|
2019-10-11T18:13:07.000Z
|
2022-03-31T21:09:41.000Z
|
sdk/python/pulumi_vault/cert_auth_backend_role.py
|
pulumi/pulumi-vault
|
1682875f4a5d7d508f36e166529ad2b8aec34090
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2019-10-28T10:08:40.000Z
|
2020-03-17T14:20:55.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['CertAuthBackendRoleArgs', 'CertAuthBackendRole']
@pulumi.input_type
class CertAuthBackendRoleArgs:
    """Typed argument container used to construct a ``CertAuthBackendRole``.

    Decorated with ``@pulumi.input_type``: Pulumi's SDK machinery consumes the
    property getters/setters defined below (with their camelCase wire names
    given via ``@pulumi.getter(name=...)``) to marshal these values.  Only
    ``certificate`` is required; every other argument is stored only when a
    non-``None`` value is supplied.
    """
    def __init__(__self__, *,
                 certificate: pulumi.Input[str],
                 allowed_common_names: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 allowed_dns_sans: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 allowed_email_sans: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 allowed_names: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 allowed_organization_units: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 allowed_uri_sans: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 backend: Optional[pulumi.Input[str]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 required_extensions: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 token_bound_cidrs: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 token_explicit_max_ttl: Optional[pulumi.Input[int]] = None,
                 token_max_ttl: Optional[pulumi.Input[int]] = None,
                 token_no_default_policy: Optional[pulumi.Input[bool]] = None,
                 token_num_uses: Optional[pulumi.Input[int]] = None,
                 token_period: Optional[pulumi.Input[int]] = None,
                 token_policies: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 token_ttl: Optional[pulumi.Input[int]] = None,
                 token_type: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a CertAuthBackendRole resource.

        :param pulumi.Input[str] certificate: CA certificate used to validate client certificates
        :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_common_names: Allowed the common names for authenticated client certificates
        :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_dns_sans: Allowed alternative dns names for authenticated client certificates
        :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_email_sans: Allowed emails for authenticated client certificates
        :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_names: Allowed subject names for authenticated client certificates
        :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_organization_units: Allowed organization units for authenticated client certificates
        :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_uri_sans: Allowed URIs for authenticated client certificates
        :param pulumi.Input[str] backend: Path to the mounted Cert auth backend
        :param pulumi.Input[str] display_name: The name to display on tokens issued under this role.
        :param pulumi.Input[str] name: Name of the role
        :param pulumi.Input[Sequence[pulumi.Input[str]]] required_extensions: TLS extensions required on client certificates
        :param pulumi.Input[Sequence[pulumi.Input[str]]] token_bound_cidrs: List of CIDR blocks; if set, specifies blocks of IP
               addresses which can authenticate successfully, and ties the resulting token to these blocks
               as well.
        :param pulumi.Input[int] token_explicit_max_ttl: If set, will encode an
               [explicit max TTL](https://www.vaultproject.io/docs/concepts/tokens.html#token-time-to-live-periodic-tokens-and-explicit-max-ttls)
               onto the token in number of seconds. This is a hard cap even if `token_ttl` and
               `token_max_ttl` would otherwise allow a renewal.
        :param pulumi.Input[int] token_max_ttl: The maximum lifetime for generated tokens in number of seconds.
               Its current value will be referenced at renewal time.
        :param pulumi.Input[bool] token_no_default_policy: If set, the default policy will not be set on
               generated tokens; otherwise it will be added to the policies set in token_policies.
        :param pulumi.Input[int] token_num_uses: The
               [period](https://www.vaultproject.io/docs/concepts/tokens.html#token-time-to-live-periodic-tokens-and-explicit-max-ttls),
               if any, in number of seconds to set on the token.
        :param pulumi.Input[int] token_period: If set, indicates that the
               token generated using this role should never expire. The token should be renewed within the
               duration specified by this value. At each renewal, the token's TTL will be set to the
               value of this field. Specified in seconds.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] token_policies: List of policies to encode onto generated tokens. Depending
               on the auth method, this list may be supplemented by user/group/other values.
        :param pulumi.Input[int] token_ttl: The incremental lifetime for generated tokens in number of seconds.
               Its current value will be referenced at renewal time.
        :param pulumi.Input[str] token_type: The type of token that should be generated. Can be `service`,
               `batch`, or `default` to use the mount's tuned default (which unless changed will be
               `service` tokens). For token store roles, there are two additional possibilities:
               `default-service` and `default-batch` which specify the type to return unless the client
               requests a different type at generation time.
        """
        # The sole required argument is always recorded; every optional
        # argument is stored only when explicitly provided (None == "unset"),
        # so unset properties are simply absent from the input bag.
        pulumi.set(__self__, "certificate", certificate)
        if allowed_common_names is not None:
            pulumi.set(__self__, "allowed_common_names", allowed_common_names)
        if allowed_dns_sans is not None:
            pulumi.set(__self__, "allowed_dns_sans", allowed_dns_sans)
        if allowed_email_sans is not None:
            pulumi.set(__self__, "allowed_email_sans", allowed_email_sans)
        if allowed_names is not None:
            pulumi.set(__self__, "allowed_names", allowed_names)
        if allowed_organization_units is not None:
            pulumi.set(__self__, "allowed_organization_units", allowed_organization_units)
        if allowed_uri_sans is not None:
            pulumi.set(__self__, "allowed_uri_sans", allowed_uri_sans)
        if backend is not None:
            pulumi.set(__self__, "backend", backend)
        if display_name is not None:
            pulumi.set(__self__, "display_name", display_name)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if required_extensions is not None:
            pulumi.set(__self__, "required_extensions", required_extensions)
        if token_bound_cidrs is not None:
            pulumi.set(__self__, "token_bound_cidrs", token_bound_cidrs)
        if token_explicit_max_ttl is not None:
            pulumi.set(__self__, "token_explicit_max_ttl", token_explicit_max_ttl)
        if token_max_ttl is not None:
            pulumi.set(__self__, "token_max_ttl", token_max_ttl)
        if token_no_default_policy is not None:
            pulumi.set(__self__, "token_no_default_policy", token_no_default_policy)
        if token_num_uses is not None:
            pulumi.set(__self__, "token_num_uses", token_num_uses)
        if token_period is not None:
            pulumi.set(__self__, "token_period", token_period)
        if token_policies is not None:
            pulumi.set(__self__, "token_policies", token_policies)
        if token_ttl is not None:
            pulumi.set(__self__, "token_ttl", token_ttl)
        if token_type is not None:
            pulumi.set(__self__, "token_type", token_type)
    # --- Generated property accessors.  Each getter/setter pair delegates to
    # pulumi.get/pulumi.set; @pulumi.getter(name=...) supplies the camelCase
    # name used on the wire when it differs from the snake_case attribute. ---
    @property
    @pulumi.getter
    def certificate(self) -> pulumi.Input[str]:
        """
        CA certificate used to validate client certificates
        """
        return pulumi.get(self, "certificate")
    @certificate.setter
    def certificate(self, value: pulumi.Input[str]):
        pulumi.set(self, "certificate", value)
    @property
    @pulumi.getter(name="allowedCommonNames")
    def allowed_common_names(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Allowed the common names for authenticated client certificates
        """
        return pulumi.get(self, "allowed_common_names")
    @allowed_common_names.setter
    def allowed_common_names(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "allowed_common_names", value)
    @property
    @pulumi.getter(name="allowedDnsSans")
    def allowed_dns_sans(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Allowed alternative dns names for authenticated client certificates
        """
        return pulumi.get(self, "allowed_dns_sans")
    @allowed_dns_sans.setter
    def allowed_dns_sans(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "allowed_dns_sans", value)
    @property
    @pulumi.getter(name="allowedEmailSans")
    def allowed_email_sans(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Allowed emails for authenticated client certificates
        """
        return pulumi.get(self, "allowed_email_sans")
    @allowed_email_sans.setter
    def allowed_email_sans(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "allowed_email_sans", value)
    @property
    @pulumi.getter(name="allowedNames")
    def allowed_names(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Allowed subject names for authenticated client certificates
        """
        return pulumi.get(self, "allowed_names")
    @allowed_names.setter
    def allowed_names(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "allowed_names", value)
    @property
    @pulumi.getter(name="allowedOrganizationUnits")
    def allowed_organization_units(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Allowed organization units for authenticated client certificates
        """
        return pulumi.get(self, "allowed_organization_units")
    @allowed_organization_units.setter
    def allowed_organization_units(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "allowed_organization_units", value)
    @property
    @pulumi.getter(name="allowedUriSans")
    def allowed_uri_sans(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Allowed URIs for authenticated client certificates
        """
        return pulumi.get(self, "allowed_uri_sans")
    @allowed_uri_sans.setter
    def allowed_uri_sans(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "allowed_uri_sans", value)
    @property
    @pulumi.getter
    def backend(self) -> Optional[pulumi.Input[str]]:
        """
        Path to the mounted Cert auth backend
        """
        return pulumi.get(self, "backend")
    @backend.setter
    def backend(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "backend", value)
    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name to display on tokens issued under this role.
        """
        return pulumi.get(self, "display_name")
    @display_name.setter
    def display_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "display_name", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the role
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter(name="requiredExtensions")
    def required_extensions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        TLS extensions required on client certificates
        """
        return pulumi.get(self, "required_extensions")
    @required_extensions.setter
    def required_extensions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "required_extensions", value)
    @property
    @pulumi.getter(name="tokenBoundCidrs")
    def token_bound_cidrs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        List of CIDR blocks; if set, specifies blocks of IP
        addresses which can authenticate successfully, and ties the resulting token to these blocks
        as well.
        """
        return pulumi.get(self, "token_bound_cidrs")
    @token_bound_cidrs.setter
    def token_bound_cidrs(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "token_bound_cidrs", value)
    @property
    @pulumi.getter(name="tokenExplicitMaxTtl")
    def token_explicit_max_ttl(self) -> Optional[pulumi.Input[int]]:
        """
        If set, will encode an
        [explicit max TTL](https://www.vaultproject.io/docs/concepts/tokens.html#token-time-to-live-periodic-tokens-and-explicit-max-ttls)
        onto the token in number of seconds. This is a hard cap even if `token_ttl` and
        `token_max_ttl` would otherwise allow a renewal.
        """
        return pulumi.get(self, "token_explicit_max_ttl")
    @token_explicit_max_ttl.setter
    def token_explicit_max_ttl(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "token_explicit_max_ttl", value)
    @property
    @pulumi.getter(name="tokenMaxTtl")
    def token_max_ttl(self) -> Optional[pulumi.Input[int]]:
        """
        The maximum lifetime for generated tokens in number of seconds.
        Its current value will be referenced at renewal time.
        """
        return pulumi.get(self, "token_max_ttl")
    @token_max_ttl.setter
    def token_max_ttl(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "token_max_ttl", value)
    @property
    @pulumi.getter(name="tokenNoDefaultPolicy")
    def token_no_default_policy(self) -> Optional[pulumi.Input[bool]]:
        """
        If set, the default policy will not be set on
        generated tokens; otherwise it will be added to the policies set in token_policies.
        """
        return pulumi.get(self, "token_no_default_policy")
    @token_no_default_policy.setter
    def token_no_default_policy(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "token_no_default_policy", value)
    @property
    @pulumi.getter(name="tokenNumUses")
    def token_num_uses(self) -> Optional[pulumi.Input[int]]:
        """
        The
        [period](https://www.vaultproject.io/docs/concepts/tokens.html#token-time-to-live-periodic-tokens-and-explicit-max-ttls),
        if any, in number of seconds to set on the token.
        """
        return pulumi.get(self, "token_num_uses")
    @token_num_uses.setter
    def token_num_uses(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "token_num_uses", value)
    @property
    @pulumi.getter(name="tokenPeriod")
    def token_period(self) -> Optional[pulumi.Input[int]]:
        """
        If set, indicates that the
        token generated using this role should never expire. The token should be renewed within the
        duration specified by this value. At each renewal, the token's TTL will be set to the
        value of this field. Specified in seconds.
        """
        return pulumi.get(self, "token_period")
    @token_period.setter
    def token_period(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "token_period", value)
    @property
    @pulumi.getter(name="tokenPolicies")
    def token_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        List of policies to encode onto generated tokens. Depending
        on the auth method, this list may be supplemented by user/group/other values.
        """
        return pulumi.get(self, "token_policies")
    @token_policies.setter
    def token_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "token_policies", value)
    @property
    @pulumi.getter(name="tokenTtl")
    def token_ttl(self) -> Optional[pulumi.Input[int]]:
        """
        The incremental lifetime for generated tokens in number of seconds.
        Its current value will be referenced at renewal time.
        """
        return pulumi.get(self, "token_ttl")
    @token_ttl.setter
    def token_ttl(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "token_ttl", value)
    @property
    @pulumi.getter(name="tokenType")
    def token_type(self) -> Optional[pulumi.Input[str]]:
        """
        The type of token that should be generated. Can be `service`,
        `batch`, or `default` to use the mount's tuned default (which unless changed will be
        `service` tokens). For token store roles, there are two additional possibilities:
        `default-service` and `default-batch` which specify the type to return unless the client
        requests a different type at generation time.
        """
        return pulumi.get(self, "token_type")
    @token_type.setter
    def token_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "token_type", value)
@pulumi.input_type
class _CertAuthBackendRoleState:
    """Typed state container used to look up / filter ``CertAuthBackendRole``
    resources (e.g. during ``get``/import).

    Unlike ``CertAuthBackendRoleArgs``, every field here — including
    ``certificate`` — is optional, since state lookups may match on any
    subset of properties.  Decorated with ``@pulumi.input_type`` so Pulumi
    consumes the property accessors below.
    """
    def __init__(__self__, *,
                 allowed_common_names: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 allowed_dns_sans: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 allowed_email_sans: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 allowed_names: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 allowed_organization_units: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 allowed_uri_sans: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 backend: Optional[pulumi.Input[str]] = None,
                 certificate: Optional[pulumi.Input[str]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 required_extensions: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 token_bound_cidrs: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 token_explicit_max_ttl: Optional[pulumi.Input[int]] = None,
                 token_max_ttl: Optional[pulumi.Input[int]] = None,
                 token_no_default_policy: Optional[pulumi.Input[bool]] = None,
                 token_num_uses: Optional[pulumi.Input[int]] = None,
                 token_period: Optional[pulumi.Input[int]] = None,
                 token_policies: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 token_ttl: Optional[pulumi.Input[int]] = None,
                 token_type: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering CertAuthBackendRole resources.

        :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_common_names: Allowed the common names for authenticated client certificates
        :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_dns_sans: Allowed alternative dns names for authenticated client certificates
        :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_email_sans: Allowed emails for authenticated client certificates
        :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_names: Allowed subject names for authenticated client certificates
        :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_organization_units: Allowed organization units for authenticated client certificates
        :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_uri_sans: Allowed URIs for authenticated client certificates
        :param pulumi.Input[str] backend: Path to the mounted Cert auth backend
        :param pulumi.Input[str] certificate: CA certificate used to validate client certificates
        :param pulumi.Input[str] display_name: The name to display on tokens issued under this role.
        :param pulumi.Input[str] name: Name of the role
        :param pulumi.Input[Sequence[pulumi.Input[str]]] required_extensions: TLS extensions required on client certificates
        :param pulumi.Input[Sequence[pulumi.Input[str]]] token_bound_cidrs: List of CIDR blocks; if set, specifies blocks of IP
               addresses which can authenticate successfully, and ties the resulting token to these blocks
               as well.
        :param pulumi.Input[int] token_explicit_max_ttl: If set, will encode an
               [explicit max TTL](https://www.vaultproject.io/docs/concepts/tokens.html#token-time-to-live-periodic-tokens-and-explicit-max-ttls)
               onto the token in number of seconds. This is a hard cap even if `token_ttl` and
               `token_max_ttl` would otherwise allow a renewal.
        :param pulumi.Input[int] token_max_ttl: The maximum lifetime for generated tokens in number of seconds.
               Its current value will be referenced at renewal time.
        :param pulumi.Input[bool] token_no_default_policy: If set, the default policy will not be set on
               generated tokens; otherwise it will be added to the policies set in token_policies.
        :param pulumi.Input[int] token_num_uses: The
               [period](https://www.vaultproject.io/docs/concepts/tokens.html#token-time-to-live-periodic-tokens-and-explicit-max-ttls),
               if any, in number of seconds to set on the token.
        :param pulumi.Input[int] token_period: If set, indicates that the
               token generated using this role should never expire. The token should be renewed within the
               duration specified by this value. At each renewal, the token's TTL will be set to the
               value of this field. Specified in seconds.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] token_policies: List of policies to encode onto generated tokens. Depending
               on the auth method, this list may be supplemented by user/group/other values.
        :param pulumi.Input[int] token_ttl: The incremental lifetime for generated tokens in number of seconds.
               Its current value will be referenced at renewal time.
        :param pulumi.Input[str] token_type: The type of token that should be generated. Can be `service`,
               `batch`, or `default` to use the mount's tuned default (which unless changed will be
               `service` tokens). For token store roles, there are two additional possibilities:
               `default-service` and `default-batch` which specify the type to return unless the client
               requests a different type at generation time.
        """
        # All fields are optional for state lookups; only explicitly provided
        # (non-None) values are stored, so unset properties stay absent.
        if allowed_common_names is not None:
            pulumi.set(__self__, "allowed_common_names", allowed_common_names)
        if allowed_dns_sans is not None:
            pulumi.set(__self__, "allowed_dns_sans", allowed_dns_sans)
        if allowed_email_sans is not None:
            pulumi.set(__self__, "allowed_email_sans", allowed_email_sans)
        if allowed_names is not None:
            pulumi.set(__self__, "allowed_names", allowed_names)
        if allowed_organization_units is not None:
            pulumi.set(__self__, "allowed_organization_units", allowed_organization_units)
        if allowed_uri_sans is not None:
            pulumi.set(__self__, "allowed_uri_sans", allowed_uri_sans)
        if backend is not None:
            pulumi.set(__self__, "backend", backend)
        if certificate is not None:
            pulumi.set(__self__, "certificate", certificate)
        if display_name is not None:
            pulumi.set(__self__, "display_name", display_name)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if required_extensions is not None:
            pulumi.set(__self__, "required_extensions", required_extensions)
        if token_bound_cidrs is not None:
            pulumi.set(__self__, "token_bound_cidrs", token_bound_cidrs)
        if token_explicit_max_ttl is not None:
            pulumi.set(__self__, "token_explicit_max_ttl", token_explicit_max_ttl)
        if token_max_ttl is not None:
            pulumi.set(__self__, "token_max_ttl", token_max_ttl)
        if token_no_default_policy is not None:
            pulumi.set(__self__, "token_no_default_policy", token_no_default_policy)
        if token_num_uses is not None:
            pulumi.set(__self__, "token_num_uses", token_num_uses)
        if token_period is not None:
            pulumi.set(__self__, "token_period", token_period)
        if token_policies is not None:
            pulumi.set(__self__, "token_policies", token_policies)
        if token_ttl is not None:
            pulumi.set(__self__, "token_ttl", token_ttl)
        if token_type is not None:
            pulumi.set(__self__, "token_type", token_type)
    # --- Generated property accessors.  Each getter/setter pair delegates to
    # pulumi.get/pulumi.set; @pulumi.getter(name=...) supplies the camelCase
    # name used on the wire when it differs from the snake_case attribute. ---
    @property
    @pulumi.getter(name="allowedCommonNames")
    def allowed_common_names(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Allowed the common names for authenticated client certificates
        """
        return pulumi.get(self, "allowed_common_names")
    @allowed_common_names.setter
    def allowed_common_names(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "allowed_common_names", value)
    @property
    @pulumi.getter(name="allowedDnsSans")
    def allowed_dns_sans(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Allowed alternative dns names for authenticated client certificates
        """
        return pulumi.get(self, "allowed_dns_sans")
    @allowed_dns_sans.setter
    def allowed_dns_sans(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "allowed_dns_sans", value)
    @property
    @pulumi.getter(name="allowedEmailSans")
    def allowed_email_sans(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Allowed emails for authenticated client certificates
        """
        return pulumi.get(self, "allowed_email_sans")
    @allowed_email_sans.setter
    def allowed_email_sans(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "allowed_email_sans", value)
    @property
    @pulumi.getter(name="allowedNames")
    def allowed_names(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Allowed subject names for authenticated client certificates
        """
        return pulumi.get(self, "allowed_names")
    @allowed_names.setter
    def allowed_names(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "allowed_names", value)
    @property
    @pulumi.getter(name="allowedOrganizationUnits")
    def allowed_organization_units(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Allowed organization units for authenticated client certificates
        """
        return pulumi.get(self, "allowed_organization_units")
    @allowed_organization_units.setter
    def allowed_organization_units(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "allowed_organization_units", value)
    @property
    @pulumi.getter(name="allowedUriSans")
    def allowed_uri_sans(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Allowed URIs for authenticated client certificates
        """
        return pulumi.get(self, "allowed_uri_sans")
    @allowed_uri_sans.setter
    def allowed_uri_sans(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "allowed_uri_sans", value)
    @property
    @pulumi.getter
    def backend(self) -> Optional[pulumi.Input[str]]:
        """
        Path to the mounted Cert auth backend
        """
        return pulumi.get(self, "backend")
    @backend.setter
    def backend(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "backend", value)
    @property
    @pulumi.getter
    def certificate(self) -> Optional[pulumi.Input[str]]:
        """
        CA certificate used to validate client certificates
        """
        return pulumi.get(self, "certificate")
    @certificate.setter
    def certificate(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "certificate", value)
    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name to display on tokens issued under this role.
        """
        return pulumi.get(self, "display_name")
    @display_name.setter
    def display_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "display_name", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the role
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter(name="requiredExtensions")
    def required_extensions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        TLS extensions required on client certificates
        """
        return pulumi.get(self, "required_extensions")
    @required_extensions.setter
    def required_extensions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "required_extensions", value)
    @property
    @pulumi.getter(name="tokenBoundCidrs")
    def token_bound_cidrs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        List of CIDR blocks; if set, specifies blocks of IP
        addresses which can authenticate successfully, and ties the resulting token to these blocks
        as well.
        """
        return pulumi.get(self, "token_bound_cidrs")
    @token_bound_cidrs.setter
    def token_bound_cidrs(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "token_bound_cidrs", value)
    @property
    @pulumi.getter(name="tokenExplicitMaxTtl")
    def token_explicit_max_ttl(self) -> Optional[pulumi.Input[int]]:
        """
        If set, will encode an
        [explicit max TTL](https://www.vaultproject.io/docs/concepts/tokens.html#token-time-to-live-periodic-tokens-and-explicit-max-ttls)
        onto the token in number of seconds. This is a hard cap even if `token_ttl` and
        `token_max_ttl` would otherwise allow a renewal.
        """
        return pulumi.get(self, "token_explicit_max_ttl")
    @token_explicit_max_ttl.setter
    def token_explicit_max_ttl(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "token_explicit_max_ttl", value)
    @property
    @pulumi.getter(name="tokenMaxTtl")
    def token_max_ttl(self) -> Optional[pulumi.Input[int]]:
        """
        The maximum lifetime for generated tokens in number of seconds.
        Its current value will be referenced at renewal time.
        """
        return pulumi.get(self, "token_max_ttl")
    @token_max_ttl.setter
    def token_max_ttl(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "token_max_ttl", value)
    @property
    @pulumi.getter(name="tokenNoDefaultPolicy")
    def token_no_default_policy(self) -> Optional[pulumi.Input[bool]]:
        """
        If set, the default policy will not be set on
        generated tokens; otherwise it will be added to the policies set in token_policies.
        """
        return pulumi.get(self, "token_no_default_policy")
    @token_no_default_policy.setter
    def token_no_default_policy(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "token_no_default_policy", value)
    @property
    @pulumi.getter(name="tokenNumUses")
    def token_num_uses(self) -> Optional[pulumi.Input[int]]:
        """
        The
        [period](https://www.vaultproject.io/docs/concepts/tokens.html#token-time-to-live-periodic-tokens-and-explicit-max-ttls),
        if any, in number of seconds to set on the token.
        """
        return pulumi.get(self, "token_num_uses")
    @token_num_uses.setter
    def token_num_uses(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "token_num_uses", value)
    @property
    @pulumi.getter(name="tokenPeriod")
    def token_period(self) -> Optional[pulumi.Input[int]]:
        """
        If set, indicates that the
        token generated using this role should never expire. The token should be renewed within the
        duration specified by this value. At each renewal, the token's TTL will be set to the
        value of this field. Specified in seconds.
        """
        return pulumi.get(self, "token_period")
    @token_period.setter
    def token_period(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "token_period", value)
    @property
    @pulumi.getter(name="tokenPolicies")
    def token_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        List of policies to encode onto generated tokens. Depending
        on the auth method, this list may be supplemented by user/group/other values.
        """
        return pulumi.get(self, "token_policies")
    @token_policies.setter
    def token_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "token_policies", value)
    @property
    @pulumi.getter(name="tokenTtl")
    def token_ttl(self) -> Optional[pulumi.Input[int]]:
        """
        The incremental lifetime for generated tokens in number of seconds.
        Its current value will be referenced at renewal time.
        """
        return pulumi.get(self, "token_ttl")
    @token_ttl.setter
    def token_ttl(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "token_ttl", value)
    @property
    @pulumi.getter(name="tokenType")
    def token_type(self) -> Optional[pulumi.Input[str]]:
        """
        The type of token that should be generated. Can be `service`,
        `batch`, or `default` to use the mount's tuned default (which unless changed will be
        `service` tokens). For token store roles, there are two additional possibilities:
        `default-service` and `default-batch` which specify the type to return unless the client
        requests a different type at generation time.
        """
        return pulumi.get(self, "token_type")
    @token_type.setter
    def token_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "token_type", value)
class CertAuthBackendRole(pulumi.CustomResource):
    """A role in Vault's TLS certificate ("cert") auth backend.

    Auto-generated Pulumi resource wrapper: property values are stored and
    retrieved through ``pulumi.set``/``pulumi.get`` rather than plain
    instance attributes, and the resource type token passed to the engine
    must not change.
    """

    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 allowed_common_names: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 allowed_dns_sans: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 allowed_email_sans: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 allowed_names: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 allowed_organization_units: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 allowed_uri_sans: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 backend: Optional[pulumi.Input[str]] = None,
                 certificate: Optional[pulumi.Input[str]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 required_extensions: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 token_bound_cidrs: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 token_explicit_max_ttl: Optional[pulumi.Input[int]] = None,
                 token_max_ttl: Optional[pulumi.Input[int]] = None,
                 token_no_default_policy: Optional[pulumi.Input[bool]] = None,
                 token_num_uses: Optional[pulumi.Input[int]] = None,
                 token_period: Optional[pulumi.Input[int]] = None,
                 token_policies: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 token_ttl: Optional[pulumi.Input[int]] = None,
                 token_type: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Provides a resource to create a role in a [Cert auth backend within Vault](https://www.vaultproject.io/docs/auth/cert.html).

        ## Example Usage

        ```python
        import pulumi
        import pulumi_vault as vault

        cert_auth_backend = vault.AuthBackend("certAuthBackend",
            path="cert",
            type="cert")
        cert_cert_auth_backend_role = vault.CertAuthBackendRole("certCertAuthBackendRole",
            certificate=(lambda path: open(path).read())("/path/to/certs/ca-cert.pem"),
            backend=cert_auth_backend.path,
            allowed_names=[
                "foo.example.org",
                "baz.example.org",
            ],
            token_ttl=300,
            token_max_ttl=600,
            token_policies=["foo"])
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_common_names: Allowed common names for authenticated client certificates
        :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_dns_sans: Allowed alternative dns names for authenticated client certificates
        :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_email_sans: Allowed emails for authenticated client certificates
        :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_names: Allowed subject names for authenticated client certificates
        :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_organization_units: Allowed organization units for authenticated client certificates
        :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_uri_sans: Allowed URIs for authenticated client certificates
        :param pulumi.Input[str] backend: Path to the mounted Cert auth backend
        :param pulumi.Input[str] certificate: CA certificate used to validate client certificates
        :param pulumi.Input[str] display_name: The name to display on tokens issued under this role.
        :param pulumi.Input[str] name: Name of the role
        :param pulumi.Input[Sequence[pulumi.Input[str]]] required_extensions: TLS extensions required on client certificates
        :param pulumi.Input[Sequence[pulumi.Input[str]]] token_bound_cidrs: List of CIDR blocks; if set, specifies blocks of IP
               addresses which can authenticate successfully, and ties the resulting token to these blocks
               as well.
        :param pulumi.Input[int] token_explicit_max_ttl: If set, will encode an
               [explicit max TTL](https://www.vaultproject.io/docs/concepts/tokens.html#token-time-to-live-periodic-tokens-and-explicit-max-ttls)
               onto the token in number of seconds. This is a hard cap even if `token_ttl` and
               `token_max_ttl` would otherwise allow a renewal.
        :param pulumi.Input[int] token_max_ttl: The maximum lifetime for generated tokens in number of seconds.
               Its current value will be referenced at renewal time.
        :param pulumi.Input[bool] token_no_default_policy: If set, the default policy will not be set on
               generated tokens; otherwise it will be added to the policies set in token_policies.
        :param pulumi.Input[int] token_num_uses: The maximum number of times a generated token may be
               used (within its lifetime); 0 means unlimited.
        :param pulumi.Input[int] token_period: If set, indicates that the
               token generated using this role should never expire. The token should be renewed within the
               duration specified by this value. At each renewal, the token's TTL will be set to the
               value of this field. Specified in seconds.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] token_policies: List of policies to encode onto generated tokens. Depending
               on the auth method, this list may be supplemented by user/group/other values.
        :param pulumi.Input[int] token_ttl: The incremental lifetime for generated tokens in number of seconds.
               Its current value will be referenced at renewal time.
        :param pulumi.Input[str] token_type: The type of token that should be generated. Can be `service`,
               `batch`, or `default` to use the mount's tuned default (which unless changed will be
               `service` tokens). For token store roles, there are two additional possibilities:
               `default-service` and `default-batch` which specify the type to return unless the client
               requests a different type at generation time.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: CertAuthBackendRoleArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Provides a resource to create a role in a [Cert auth backend within Vault](https://www.vaultproject.io/docs/auth/cert.html).

        ## Example Usage

        ```python
        import pulumi
        import pulumi_vault as vault

        cert_auth_backend = vault.AuthBackend("certAuthBackend",
            path="cert",
            type="cert")
        cert_cert_auth_backend_role = vault.CertAuthBackendRole("certCertAuthBackendRole",
            certificate=(lambda path: open(path).read())("/path/to/certs/ca-cert.pem"),
            backend=cert_auth_backend.path,
            allowed_names=[
                "foo.example.org",
                "baz.example.org",
            ],
            token_ttl=300,
            token_max_ttl=600,
            token_policies=["foo"])
        ```

        :param str resource_name: The name of the resource.
        :param CertAuthBackendRoleArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: the args-object form
        # (CertAuthBackendRoleArgs) and the plain keyword-argument form.
        resource_args, opts = _utilities.get_resource_args_opts(CertAuthBackendRoleArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       allowed_common_names: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       allowed_dns_sans: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       allowed_email_sans: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       allowed_names: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       allowed_organization_units: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       allowed_uri_sans: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       backend: Optional[pulumi.Input[str]] = None,
                       certificate: Optional[pulumi.Input[str]] = None,
                       display_name: Optional[pulumi.Input[str]] = None,
                       name: Optional[pulumi.Input[str]] = None,
                       required_extensions: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       token_bound_cidrs: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       token_explicit_max_ttl: Optional[pulumi.Input[int]] = None,
                       token_max_ttl: Optional[pulumi.Input[int]] = None,
                       token_no_default_policy: Optional[pulumi.Input[bool]] = None,
                       token_num_uses: Optional[pulumi.Input[int]] = None,
                       token_period: Optional[pulumi.Input[int]] = None,
                       token_policies: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       token_ttl: Optional[pulumi.Input[int]] = None,
                       token_type: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        # Normalize and validate resource options, filling in the SDK version.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource (as opposed to getting an existing one
            # by id, in which case __props__ carries the looked-up state).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = CertAuthBackendRoleArgs.__new__(CertAuthBackendRoleArgs)
            __props__.__dict__["allowed_common_names"] = allowed_common_names
            __props__.__dict__["allowed_dns_sans"] = allowed_dns_sans
            __props__.__dict__["allowed_email_sans"] = allowed_email_sans
            __props__.__dict__["allowed_names"] = allowed_names
            __props__.__dict__["allowed_organization_units"] = allowed_organization_units
            __props__.__dict__["allowed_uri_sans"] = allowed_uri_sans
            __props__.__dict__["backend"] = backend
            # 'certificate' is required when creating; skip the check when
            # adopting an already-provisioned resource (opts.urn is set).
            if certificate is None and not opts.urn:
                raise TypeError("Missing required property 'certificate'")
            __props__.__dict__["certificate"] = certificate
            __props__.__dict__["display_name"] = display_name
            __props__.__dict__["name"] = name
            __props__.__dict__["required_extensions"] = required_extensions
            __props__.__dict__["token_bound_cidrs"] = token_bound_cidrs
            __props__.__dict__["token_explicit_max_ttl"] = token_explicit_max_ttl
            __props__.__dict__["token_max_ttl"] = token_max_ttl
            __props__.__dict__["token_no_default_policy"] = token_no_default_policy
            __props__.__dict__["token_num_uses"] = token_num_uses
            __props__.__dict__["token_period"] = token_period
            __props__.__dict__["token_policies"] = token_policies
            __props__.__dict__["token_ttl"] = token_ttl
            __props__.__dict__["token_type"] = token_type
        super(CertAuthBackendRole, __self__).__init__(
            'vault:index/certAuthBackendRole:CertAuthBackendRole',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            allowed_common_names: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            allowed_dns_sans: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            allowed_email_sans: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            allowed_names: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            allowed_organization_units: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            allowed_uri_sans: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            backend: Optional[pulumi.Input[str]] = None,
            certificate: Optional[pulumi.Input[str]] = None,
            display_name: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            required_extensions: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            token_bound_cidrs: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            token_explicit_max_ttl: Optional[pulumi.Input[int]] = None,
            token_max_ttl: Optional[pulumi.Input[int]] = None,
            token_no_default_policy: Optional[pulumi.Input[bool]] = None,
            token_num_uses: Optional[pulumi.Input[int]] = None,
            token_period: Optional[pulumi.Input[int]] = None,
            token_policies: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            token_ttl: Optional[pulumi.Input[int]] = None,
            token_type: Optional[pulumi.Input[str]] = None) -> 'CertAuthBackendRole':
        """
        Get an existing CertAuthBackendRole resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_common_names: Allowed common names for authenticated client certificates
        :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_dns_sans: Allowed alternative dns names for authenticated client certificates
        :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_email_sans: Allowed emails for authenticated client certificates
        :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_names: Allowed subject names for authenticated client certificates
        :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_organization_units: Allowed organization units for authenticated client certificates
        :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_uri_sans: Allowed URIs for authenticated client certificates
        :param pulumi.Input[str] backend: Path to the mounted Cert auth backend
        :param pulumi.Input[str] certificate: CA certificate used to validate client certificates
        :param pulumi.Input[str] display_name: The name to display on tokens issued under this role.
        :param pulumi.Input[str] name: Name of the role
        :param pulumi.Input[Sequence[pulumi.Input[str]]] required_extensions: TLS extensions required on client certificates
        :param pulumi.Input[Sequence[pulumi.Input[str]]] token_bound_cidrs: List of CIDR blocks; if set, specifies blocks of IP
               addresses which can authenticate successfully, and ties the resulting token to these blocks
               as well.
        :param pulumi.Input[int] token_explicit_max_ttl: If set, will encode an
               [explicit max TTL](https://www.vaultproject.io/docs/concepts/tokens.html#token-time-to-live-periodic-tokens-and-explicit-max-ttls)
               onto the token in number of seconds. This is a hard cap even if `token_ttl` and
               `token_max_ttl` would otherwise allow a renewal.
        :param pulumi.Input[int] token_max_ttl: The maximum lifetime for generated tokens in number of seconds.
               Its current value will be referenced at renewal time.
        :param pulumi.Input[bool] token_no_default_policy: If set, the default policy will not be set on
               generated tokens; otherwise it will be added to the policies set in token_policies.
        :param pulumi.Input[int] token_num_uses: The maximum number of times a generated token may be
               used (within its lifetime); 0 means unlimited.
        :param pulumi.Input[int] token_period: If set, indicates that the
               token generated using this role should never expire. The token should be renewed within the
               duration specified by this value. At each renewal, the token's TTL will be set to the
               value of this field. Specified in seconds.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] token_policies: List of policies to encode onto generated tokens. Depending
               on the auth method, this list may be supplemented by user/group/other values.
        :param pulumi.Input[int] token_ttl: The incremental lifetime for generated tokens in number of seconds.
               Its current value will be referenced at renewal time.
        :param pulumi.Input[str] token_type: The type of token that should be generated. Can be `service`,
               `batch`, or `default` to use the mount's tuned default (which unless changed will be
               `service` tokens). For token store roles, there are two additional possibilities:
               `default-service` and `default-batch` which specify the type to return unless the client
               requests a different type at generation time.
        """
        # Folding the id into the options makes the engine perform a lookup
        # of existing state instead of creating a new resource.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _CertAuthBackendRoleState.__new__(_CertAuthBackendRoleState)
        __props__.__dict__["allowed_common_names"] = allowed_common_names
        __props__.__dict__["allowed_dns_sans"] = allowed_dns_sans
        __props__.__dict__["allowed_email_sans"] = allowed_email_sans
        __props__.__dict__["allowed_names"] = allowed_names
        __props__.__dict__["allowed_organization_units"] = allowed_organization_units
        __props__.__dict__["allowed_uri_sans"] = allowed_uri_sans
        __props__.__dict__["backend"] = backend
        __props__.__dict__["certificate"] = certificate
        __props__.__dict__["display_name"] = display_name
        __props__.__dict__["name"] = name
        __props__.__dict__["required_extensions"] = required_extensions
        __props__.__dict__["token_bound_cidrs"] = token_bound_cidrs
        __props__.__dict__["token_explicit_max_ttl"] = token_explicit_max_ttl
        __props__.__dict__["token_max_ttl"] = token_max_ttl
        __props__.__dict__["token_no_default_policy"] = token_no_default_policy
        __props__.__dict__["token_num_uses"] = token_num_uses
        __props__.__dict__["token_period"] = token_period
        __props__.__dict__["token_policies"] = token_policies
        __props__.__dict__["token_ttl"] = token_ttl
        __props__.__dict__["token_type"] = token_type
        return CertAuthBackendRole(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="allowedCommonNames")
    def allowed_common_names(self) -> pulumi.Output[Sequence[str]]:
        """
        Allowed common names for authenticated client certificates
        """
        return pulumi.get(self, "allowed_common_names")

    @property
    @pulumi.getter(name="allowedDnsSans")
    def allowed_dns_sans(self) -> pulumi.Output[Sequence[str]]:
        """
        Allowed alternative dns names for authenticated client certificates
        """
        return pulumi.get(self, "allowed_dns_sans")

    @property
    @pulumi.getter(name="allowedEmailSans")
    def allowed_email_sans(self) -> pulumi.Output[Sequence[str]]:
        """
        Allowed emails for authenticated client certificates
        """
        return pulumi.get(self, "allowed_email_sans")

    @property
    @pulumi.getter(name="allowedNames")
    def allowed_names(self) -> pulumi.Output[Sequence[str]]:
        """
        Allowed subject names for authenticated client certificates
        """
        return pulumi.get(self, "allowed_names")

    @property
    @pulumi.getter(name="allowedOrganizationUnits")
    def allowed_organization_units(self) -> pulumi.Output[Sequence[str]]:
        """
        Allowed organization units for authenticated client certificates
        """
        return pulumi.get(self, "allowed_organization_units")

    @property
    @pulumi.getter(name="allowedUriSans")
    def allowed_uri_sans(self) -> pulumi.Output[Sequence[str]]:
        """
        Allowed URIs for authenticated client certificates
        """
        return pulumi.get(self, "allowed_uri_sans")

    @property
    @pulumi.getter
    def backend(self) -> pulumi.Output[Optional[str]]:
        """
        Path to the mounted Cert auth backend
        """
        return pulumi.get(self, "backend")

    @property
    @pulumi.getter
    def certificate(self) -> pulumi.Output[str]:
        """
        CA certificate used to validate client certificates
        """
        return pulumi.get(self, "certificate")

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> pulumi.Output[str]:
        """
        The name to display on tokens issued under this role.
        """
        return pulumi.get(self, "display_name")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Name of the role
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="requiredExtensions")
    def required_extensions(self) -> pulumi.Output[Sequence[str]]:
        """
        TLS extensions required on client certificates
        """
        return pulumi.get(self, "required_extensions")

    @property
    @pulumi.getter(name="tokenBoundCidrs")
    def token_bound_cidrs(self) -> pulumi.Output[Optional[Sequence[str]]]:
        """
        List of CIDR blocks; if set, specifies blocks of IP
        addresses which can authenticate successfully, and ties the resulting token to these blocks
        as well.
        """
        return pulumi.get(self, "token_bound_cidrs")

    @property
    @pulumi.getter(name="tokenExplicitMaxTtl")
    def token_explicit_max_ttl(self) -> pulumi.Output[Optional[int]]:
        """
        If set, will encode an
        [explicit max TTL](https://www.vaultproject.io/docs/concepts/tokens.html#token-time-to-live-periodic-tokens-and-explicit-max-ttls)
        onto the token in number of seconds. This is a hard cap even if `token_ttl` and
        `token_max_ttl` would otherwise allow a renewal.
        """
        return pulumi.get(self, "token_explicit_max_ttl")

    @property
    @pulumi.getter(name="tokenMaxTtl")
    def token_max_ttl(self) -> pulumi.Output[Optional[int]]:
        """
        The maximum lifetime for generated tokens in number of seconds.
        Its current value will be referenced at renewal time.
        """
        return pulumi.get(self, "token_max_ttl")

    @property
    @pulumi.getter(name="tokenNoDefaultPolicy")
    def token_no_default_policy(self) -> pulumi.Output[Optional[bool]]:
        """
        If set, the default policy will not be set on
        generated tokens; otherwise it will be added to the policies set in token_policies.
        """
        return pulumi.get(self, "token_no_default_policy")

    @property
    @pulumi.getter(name="tokenNumUses")
    def token_num_uses(self) -> pulumi.Output[Optional[int]]:
        """
        The maximum number of times a generated token may be
        used (within its lifetime); 0 means unlimited.
        """
        return pulumi.get(self, "token_num_uses")

    @property
    @pulumi.getter(name="tokenPeriod")
    def token_period(self) -> pulumi.Output[Optional[int]]:
        """
        If set, indicates that the
        token generated using this role should never expire. The token should be renewed within the
        duration specified by this value. At each renewal, the token's TTL will be set to the
        value of this field. Specified in seconds.
        """
        return pulumi.get(self, "token_period")

    @property
    @pulumi.getter(name="tokenPolicies")
    def token_policies(self) -> pulumi.Output[Optional[Sequence[str]]]:
        """
        List of policies to encode onto generated tokens. Depending
        on the auth method, this list may be supplemented by user/group/other values.
        """
        return pulumi.get(self, "token_policies")

    @property
    @pulumi.getter(name="tokenTtl")
    def token_ttl(self) -> pulumi.Output[Optional[int]]:
        """
        The incremental lifetime for generated tokens in number of seconds.
        Its current value will be referenced at renewal time.
        """
        return pulumi.get(self, "token_ttl")

    @property
    @pulumi.getter(name="tokenType")
    def token_type(self) -> pulumi.Output[Optional[str]]:
        """
        The type of token that should be generated. Can be `service`,
        `batch`, or `default` to use the mount's tuned default (which unless changed will be
        `service` tokens). For token store roles, there are two additional possibilities:
        `default-service` and `default-batch` which specify the type to return unless the client
        requests a different type at generation time.
        """
        return pulumi.get(self, "token_type")
| 50.81157
| 149
| 0.670782
| 7,570
| 61,482
| 5.239234
| 0.038045
| 0.105671
| 0.06495
| 0.07375
| 0.954061
| 0.947379
| 0.941933
| 0.933839
| 0.931141
| 0.921333
| 0
| 0.000275
| 0.230604
| 61,482
| 1,209
| 150
| 50.853598
| 0.838153
| 0.375866
| 0
| 0.893417
| 1
| 0
| 0.106321
| 0.022515
| 0
| 0
| 0
| 0
| 0
| 1
| 0.167712
| false
| 0.001567
| 0.007837
| 0
| 0.275862
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
05f5c6287877415417aaf9e911414ec56040db41
| 264,847
|
py
|
Python
|
proteus/points.py
|
dathath/IJCAI_2017_SD
|
0e8912d9c6bc1e40213edbd303e56ab7fc81dbaa
|
[
"BSD-2-Clause"
] | null | null | null |
proteus/points.py
|
dathath/IJCAI_2017_SD
|
0e8912d9c6bc1e40213edbd303e56ab7fc81dbaa
|
[
"BSD-2-Clause"
] | null | null | null |
proteus/points.py
|
dathath/IJCAI_2017_SD
|
0e8912d9c6bc1e40213edbd303e56ab7fc81dbaa
|
[
"BSD-2-Clause"
] | null | null | null |
import random
import copy
data_points=[]
from pwl_classifier import pwl_classifier
import numpy as np
from svm_classifier import svm_classifier
labels=[]
theta1=1.75
y1=0.125
x1=0.0
alpha1=0.125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.75
y1=0.09375
x1=0.0
alpha1=0.09375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.75
y1=0.076171875
x1=0.0
alpha1=0.076171875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.75
y1=0.15625
x1=0.0
alpha1=0.15625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.75
y1=0.173828125
x1=0.0
alpha1=0.173828125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.6875
y1=0.125
x1=0.0
alpha1=0.125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.6875
y1=0.10546875
x1=0.0
alpha1=0.10546875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.6875
y1=0.14453125
x1=0.0
alpha1=0.14453125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.71875
y1=0.125
x1=0.0
alpha1=0.125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.71875
y1=0.09375
x1=0.0
alpha1=0.09375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.75
y1=0.03125
x1=0.0
alpha1=0.03125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.75
y1=0.0546875
x1=0.0
alpha1=0.0546875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.75
y1=0.013671875
x1=0.0
alpha1=0.013671875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.75
y1=0.09375
x1=0.0
alpha1=0.09375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.75
y1=0.07421875
x1=0.0
alpha1=0.07421875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.75
y1=0.111328125
x1=0.0
alpha1=0.111328125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.6875
y1=0.0625
x1=0.0
alpha1=0.0625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.6875
y1=0.04296875
x1=0.0
alpha1=0.04296875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.6875
y1=0.08203125
x1=0.0
alpha1=0.08203125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.71875
y1=0.0625
x1=0.0
alpha1=0.0625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.75
y1=0.00390625
x1=0.0
alpha1=0.00390625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.75
y1=0.03125
x1=0.0
alpha1=0.03125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.75
y1=0.0546875
x1=0.0
alpha1=0.0546875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.6875
y1=0.00390625
x1=0.0
alpha1=0.00390625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.6875
y1=0.0234375
x1=0.0
alpha1=0.0234375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.71875
y1=0.00390625
x1=0.0
alpha1=0.00390625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.71875
y1=0.03125
x1=0.0
alpha1=0.03125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.71875
y1=0.05078125
x1=0.0
alpha1=0.05078125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.734375
y1=0.015625
x1=0.0
alpha1=0.015625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.6953125
y1=0.0390625
x1=0.0
alpha1=0.0390625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.75
y1=0.25
x1=0.0
alpha1=0.25
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.75
y1=0.21875
x1=0.0
alpha1=0.21875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.75
y1=0.201171875
x1=0.0
alpha1=0.201171875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.75
y1=0.28125
x1=0.0
alpha1=0.28125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.75
y1=0.298828125
x1=0.0
alpha1=0.298828125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.6875
y1=0.25
x1=0.0
alpha1=0.25
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.6875
y1=0.23046875
x1=0.0
alpha1=0.23046875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.6875
y1=0.26953125
x1=0.0
alpha1=0.26953125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.71875
y1=0.25
x1=0.0
alpha1=0.25
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.71875
y1=0.21875
x1=0.0
alpha1=0.21875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.75
y1=0.3125
x1=0.0
alpha1=0.3125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.75
y1=0.28125
x1=0.0
alpha1=0.28125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.75
y1=0.263671875
x1=0.0
alpha1=0.263671875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.75
y1=0.34375
x1=0.0
alpha1=0.34375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.75
y1=0.361328125
x1=0.0
alpha1=0.361328125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.6875
y1=0.3125
x1=0.0
alpha1=0.3125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.6875
y1=0.29296875
x1=0.0
alpha1=0.29296875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.6875
y1=0.33203125
x1=0.0
alpha1=0.33203125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.71875
y1=0.3125
x1=0.0
alpha1=0.3125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.71875
y1=0.28125
x1=0.0
alpha1=0.28125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.75
y1=0.1875
x1=0.0
alpha1=0.1875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.75
y1=0.15625
x1=0.0
alpha1=0.15625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.75
y1=0.138671875
x1=0.0
alpha1=0.138671875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.75
y1=0.21875
x1=0.0
alpha1=0.21875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.75
y1=0.236328125
x1=0.0
alpha1=0.236328125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.6875
y1=0.1875
x1=0.0
alpha1=0.1875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.6875
y1=0.16796875
x1=0.0
alpha1=0.16796875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.6875
y1=0.20703125
x1=0.0
alpha1=0.20703125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.71875
y1=0.1875
x1=0.0
alpha1=0.1875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.71875
y1=0.15625
x1=0.0
alpha1=0.15625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.59375
y1=0.125
x1=0.0
alpha1=0.125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.59375
y1=0.09375
x1=0.0
alpha1=0.09375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.59375
y1=0.15625
x1=0.0
alpha1=0.15625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.625
y1=0.125
x1=0.0
alpha1=0.125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.625
y1=0.09375
x1=0.0
alpha1=0.09375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.625
y1=0.076171875
x1=0.0
alpha1=0.076171875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.625
y1=0.15625
x1=0.0
alpha1=0.15625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.625
y1=0.173828125
x1=0.0
alpha1=0.173828125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.609375
y1=0.109375
x1=0.0
alpha1=0.109375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.609375
y1=0.140625
x1=0.0
alpha1=0.140625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.59375
y1=0.03125
x1=0.0
alpha1=0.03125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.59375
y1=0.0546875
x1=0.0
alpha1=0.0546875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
# Auto-generated sample batch, collapsed from unrolled straight-line code into
# a data table + loop.  Each row is (theta1, y1, x1, alpha1); every sample is
# labelled 1 and contributes the feature vector [x1**2, y1**2] to data_points.
# NOTE(review): theta1 and alpha1 are recorded per sample but are not used in
# the feature vector here — presumably consumed elsewhere; kept for fidelity.
# Row order and literal values are byte-identical to the original unrolled
# code, so the loop variables (theta1, y1, x1, alpha1) and xsampling finish
# with the same final values as before.
for theta1, y1, x1, alpha1 in [
    (1.59375, 0.09375, 0.0, 0.09375),
    (1.59375, 0.07421875, 0.0, 0.07421875),
    (1.625, 0.03125, 0.0, 0.03125),
    (1.625, 0.0625, 0.0, 0.0625),
    (1.625, 0.013671875, 0.0, 0.013671875),
    (1.625, 0.09375, 0.0, 0.09375),
    (1.625, 0.111328125, 0.0, 0.111328125),
    (1.578125, 0.04296875, 0.0, 0.04296875),
    (1.59375, 0.25, 0.0, 0.25),
    (1.59375, 0.21875, 0.0, 0.21875),
    (1.59375, 0.28125, 0.0, 0.28125),
    (1.625, 0.25, 0.0, 0.25),
    (1.625, 0.21875, 0.0, 0.21875),
    (1.625, 0.201171875, 0.0, 0.201171875),
    (1.625, 0.28125, 0.0, 0.28125),
    (1.625, 0.298828125, 0.0, 0.298828125),
    (1.609375, 0.234375, 0.0, 0.234375),
    (1.609375, 0.265625, 0.0, 0.265625),
    (1.59375, 0.1875, 0.0, 0.1875),
    (1.59375, 0.15625, 0.0, 0.15625),
    (1.59375, 0.21875, 0.0, 0.21875),
    (1.625, 0.1875, 0.0, 0.1875),
    (1.625, 0.15625, 0.0, 0.15625),
    (1.625, 0.138671875, 0.0, 0.138671875),
    (1.625, 0.21875, 0.0, 0.21875),
    (1.625, 0.236328125, 0.0, 0.236328125),
    (1.609375, 0.171875, 0.0, 0.171875),
    (1.609375, 0.203125, 0.0, 0.203125),
    (1.0, 0.4045085, 0.2938925, 0.5),
    (1.0, 0.3792267187, 0.2755242187, 0.46875),
    (1.0, 0.3650057167, 0.2651920605, 0.451171875),
    (1.0, 0.4297902812, 0.3122607812, 0.53125),
    (1.0, 0.4440112832, 0.3225929394, 0.548828125),
    (0.96875, 0.4045085, 0.2938925, 0.5),
    (0.96875, 0.4297902812, 0.3122607812, 0.53125),
    (0.96875, 0.3792267187, 0.2755242187, 0.46875),
    (0.953125, 0.3918676093, 0.2847083593, 0.484375),
    (0.953125, 0.4171493906, 0.3030766406, 0.515625),
    (1.0, 0.303381375, 0.220419375, 0.375),
    (1.0, 0.2780995937, 0.2020510937, 0.34375),
    (1.0, 0.2638785917, 0.1917189355, 0.326171875),
    (1.0, 0.3286631562, 0.2387876562, 0.40625),
    (1.0, 0.3428841582, 0.2491198144, 0.423828125),
    (0.96875, 0.303381375, 0.220419375, 0.375),
    (0.96875, 0.2780995937, 0.2020510937, 0.34375),
    (0.96875, 0.3286631562, 0.2387876562, 0.40625),
    (0.984375, 0.2907404843, 0.2112352343, 0.359375),
    (0.984375, 0.3160222656, 0.2296035156, 0.390625),
    (1.0, 0.3539449375, 0.2571559375, 0.4375),
    (1.0, 0.3286631562, 0.2387876562, 0.40625),
    (1.0, 0.3144421542, 0.228455498, 0.388671875),
    (1.0, 0.3792267187, 0.2755242187, 0.46875),
    (1.0, 0.3934477207, 0.2858563769, 0.486328125),
    (0.96875, 0.3539449375, 0.2571559375, 0.4375),
    (0.96875, 0.3286631562, 0.2387876562, 0.40625),
    (0.96875, 0.3792267187, 0.2755242187, 0.46875),
    (0.984375, 0.3413040468, 0.2479717968, 0.421875),
    (0.984375, 0.3665858281, 0.2663400781, 0.453125),
    (1.0, 0.505635625, 0.367365625, 0.625),
    (1.0, 0.5309174062, 0.3857339062, 0.65625),
    (1.0, 0.5451384082, 0.3960660644, 0.673828125),
    (1.0, 0.4803538437, 0.3489973437, 0.59375),
    (1.0, 0.4661328417, 0.3386651855, 0.576171875),
    (0.96875, 0.505635625, 0.367365625, 0.625),
    (0.96875, 0.4803538437, 0.3489973437, 0.59375),
    (0.96875, 0.5309174062, 0.3857339062, 0.65625),
    (0.984375, 0.4929947343, 0.3581814843, 0.609375),
    (0.984375, 0.5182765156, 0.3765497656, 0.640625),
    (1.0, 0.4550720625, 0.3306290625, 0.5625),
    (1.0, 0.4803538437, 0.3489973437, 0.59375),
    (1.0, 0.4945748457, 0.3593295019, 0.611328125),
    (1.0, 0.4297902812, 0.3122607812, 0.53125),
    (1.0, 0.4155692792, 0.301928623, 0.513671875),
    (0.96875, 0.4550720625, 0.3306290625, 0.5625),
    (0.96875, 0.4297902812, 0.3122607812, 0.53125),
    (0.96875, 0.4803538437, 0.3489973437, 0.59375),
    (0.984375, 0.4424311718, 0.3214449218, 0.546875),
    (0.984375, 0.4677129531, 0.3398132031, 0.578125),
    (0.9375, 0.2938925, 0.4045085, 0.5),
    (0.9375, 0.2824123242, 0.3887073867, 0.48046875),
    (0.9375, 0.3053726757, 0.4203096132, 0.51953125),
    (0.90625, 0.2938925, 0.4045085, 0.5),
    (0.90625, 0.2755242187, 0.3792267187, 0.46875),
    (0.90625, 0.3122607812, 0.4297902812, 0.53125),
    (0.890625, 0.2847083593, 0.3918676093, 0.484375),
    (0.890625, 0.2663400781, 0.3665858281, 0.453125),
    (0.890625, 0.3030766406, 0.4171493906, 0.515625),
    (0.890625, 0.3214449218, 0.4424311718, 0.546875),
    (0.9375, 0.3260369921, 0.4487516171, 0.5546874999),
    (0.9375, 0.3375171679, 0.4645527304, 0.57421875),
    (0.9375, 0.3145568164, 0.4329505039, 0.53515625),
    (0.90625, 0.3260369921, 0.4487516171, 0.5546874999),
    (0.90625, 0.3489973437, 0.4803538437, 0.59375),
    (0.90625, 0.3375171679, 0.4645527304, 0.57421875),
    (0.90625, 0.3122607812, 0.4297902812, 0.53125),
    (0.90625, 0.3007806054, 0.4139891679, 0.51171875),
    (0.92578125, 0.3065206933, 0.4218897246, 0.521484375),
    (0.9272460937, 0.345786482, 0.4759344698, 0.5882873535),
    (0.9375, 0.2571559375, 0.3539449375, 0.4375),
    (0.9375, 0.2456757617, 0.3381438242, 0.41796875),
    (0.9375, 0.2686361132, 0.3697460507, 0.45703125),
    (0.90625, 0.2571559375, 0.3539449375, 0.4375),
    (0.90625, 0.2387876562, 0.3286631562, 0.40625),
    (0.90625, 0.2755242187, 0.3792267187, 0.46875),
    (0.890625, 0.2479717968, 0.3413040468, 0.421875),
    (0.890625, 0.2296035156, 0.3160222656, 0.390625),
    (0.890625, 0.2663400781, 0.3665858281, 0.453125),
    (0.890625, 0.2847083593, 0.3918676093, 0.484375),
    (0.94140625, 0.3398132031, 0.4677129531, 0.5781249999),
    (0.94140625, 0.367365625, 0.505635625, 0.625),
    (0.94140625, 0.353589414, 0.486674289, 0.6015625),
    (0.94140625, 0.3122607812, 0.4297902812, 0.53125),
    (0.94140625, 0.3260369921, 0.4487516171, 0.5546875),
    (0.875, 0.3398132031, 0.4677129531, 0.5781249999),
    (0.875, 0.3512933789, 0.4835140664, 0.59765625),
    (0.875, 0.3283330273, 0.4519118398, 0.55859375),
    (0.90625, 0.3398132031, 0.4677129531, 0.5781249999),
    (0.90625, 0.3581814843, 0.4929947343, 0.609375),
    (0.94140625, 0.2938925, 0.4045085, 0.5),
    (0.94140625, 0.2755242187, 0.3792267187, 0.4687499999),
    (0.94140625, 0.2571559375, 0.3539449375, 0.4375),
    (0.94140625, 0.2468237792, 0.3397239355, 0.419921875),
    (0.94140625, 0.3042246582, 0.4187295019, 0.517578125),
    (0.9377441406, 0.2663400781, 0.3665858281, 0.453125),
    (0.9377441406, 0.2847083593, 0.3918676093, 0.484375),
    (0.875, 0.2755242187, 0.3792267187, 0.46875),
    (0.875, 0.2870043945, 0.395027832, 0.48828125),
    (0.875, 0.2640440429, 0.3634256054, 0.44921875),
    (1.0, 0.707889875, 0.514311875, 0.875),
    (1.0, 0.7331716562, 0.5326801562, 0.90625),
    (1.0, 0.7473926582, 0.5430123144, 0.923828125),
    (1.0, 0.6826080937, 0.4959435937, 0.84375),
    (1.0, 0.6683870917, 0.4856114355, 0.826171875),
    (0.96875, 0.707889875, 0.514311875, 0.875),
    (0.96875, 0.6826080937, 0.4959435937, 0.84375),
    (0.96875, 0.7331716562, 0.5326801562, 0.90625),
    (0.984375, 0.6952489843, 0.5051277343, 0.859375),
    (0.984375, 0.7205307656, 0.5234960156, 0.890625),
    (1.0, 0.60676275, 0.44083875, 0.75),
    (1.0, 0.6320445312, 0.4592070312, 0.78125),
    (1.0, 0.6462655332, 0.4695391894, 0.798828125),
    (1.0, 0.5814809687, 0.4224704687, 0.71875),
    (1.0, 0.5672599667, 0.4121383105, 0.701171875),
    (0.96875, 0.60676275, 0.44083875, 0.75),
    (0.96875, 0.5814809687, 0.4224704687, 0.71875),
    (0.96875, 0.6320445312, 0.4592070312, 0.78125),
    (0.984375, 0.5941218593, 0.4316546093, 0.734375),
    (0.984375, 0.6194036406, 0.4500228906, 0.765625),
    (1.0, 0.6573263125, 0.4775753125, 0.8125),
    (1.0, 0.6826080937, 0.4959435937, 0.84375),
    (1.0, 0.6968290957, 0.5062757519, 0.861328125),
    (1.0, 0.6320445312, 0.4592070312, 0.78125),
    (1.0, 0.6178235292, 0.448874873, 0.763671875),
    (0.96875, 0.6573263125, 0.4775753125, 0.8125),
    (0.96875, 0.6320445312, 0.4592070312, 0.78125),
    (0.96875, 0.6826080937, 0.4959435937, 0.84375),
    (0.984375, 0.6446854218, 0.4683911718, 0.796875),
    (0.984375, 0.6699672031, 0.4867594531, 0.828125),
    (1.0, 0.505635625, 0.367365625, 0.625),
    (1.0, 0.5309174062, 0.3857339062, 0.65625),
    (1.0, 0.5451384082, 0.3960660644, 0.673828125),
    (1.0, 0.4803538437, 0.3489973437, 0.59375),
    (1.0, 0.4661328417, 0.3386651855, 0.576171875),
    (0.96875, 0.505635625, 0.367365625, 0.625),
    (0.96875, 0.4803538437, 0.3489973437, 0.59375),
    (0.96875, 0.5309174062, 0.3857339062, 0.65625),
    (0.984375, 0.4929947343, 0.3581814843, 0.609375),
    (0.984375, 0.5182765156, 0.3765497656, 0.640625),
    (1.0, 0.5561991875, 0.4041021875, 0.6875),
    (1.0, 0.5814809687, 0.4224704687, 0.71875),
    (1.0, 0.5957019707, 0.4328026269, 0.736328125),
    (1.0, 0.5309174062, 0.3857339062, 0.65625),
    (1.0, 0.5166964042, 0.375401748, 0.638671875),
    (0.96875, 0.5561991875, 0.4041021875, 0.6875),
    (0.96875, 0.5309174062, 0.3857339062, 0.65625),
    (0.96875, 0.5814809687, 0.4224704687, 0.71875),
    (0.984375, 0.5435582968, 0.3949180468, 0.671875),
    (0.984375, 0.5688400781, 0.4132863281, 0.703125),
    (1.125, 0.60676275, 0.44083875, 0.75),
    (1.125, 0.6320445312, 0.4592070312, 0.78125),
    (1.125, 0.6462655332, 0.4695391894, 0.798828125),
    (1.125, 0.5814809687, 0.4224704687, 0.71875),
    (1.125, 0.5672599667, 0.4121383105, 0.701171875),
    (1.1875, 0.60676275, 0.44083875, 0.75),
    (1.1875, 0.6225638632, 0.4523189257, 0.76953125),
    (1.1875, 0.5909616367, 0.4293585742, 0.73046875),
    (1.15625, 0.60676275, 0.44083875, 0.75),
    (1.15625, 0.5814809687, 0.4224704687, 0.71875),
    (1.125, 0.505635625, 0.367365625, 0.625),
    (1.125, 0.5309174062, 0.3857339062, 0.65625),
    (1.125, 0.5451384082, 0.3960660644, 0.673828125),
    (1.125, 0.4803538437, 0.3489973437, 0.59375),
    (1.125, 0.4661328417, 0.3386651855, 0.576171875),
    (1.1875, 0.505635625, 0.367365625, 0.625),
    (1.1875, 0.5214367382, 0.3788458007, 0.64453125),
    (1.1875, 0.4898345117, 0.3558854492, 0.60546875),
    (1.15625, 0.505635625, 0.367365625, 0.625),
    (1.15625, 0.4803538437, 0.3489973437, 0.59375),
    (1.125, 0.5561991875, 0.4041021875, 0.6875),
    (1.125, 0.5814809687, 0.4224704687, 0.71875),
    (1.125, 0.5957019707, 0.4328026269, 0.736328125),
    (1.125, 0.5309174062, 0.3857339062, 0.65625),
    (1.125, 0.5166964042, 0.375401748, 0.638671875),
    (1.1875, 0.5561991875, 0.4041021875, 0.6875),
    (1.1875, 0.5720003007, 0.4155823632, 0.70703125),
    (1.1875, 0.5403980742, 0.3926220117, 0.66796875),
    (1.15625, 0.5561991875, 0.4041021875, 0.6875),
    (1.15625, 0.5309174062, 0.3857339062, 0.65625),
    (1.125, 0.707889875, 0.514311875, 0.875),
    (1.125, 0.7331716562, 0.5326801562, 0.90625),
    (1.125, 0.7473926582, 0.5430123144, 0.923828125),
    (1.125, 0.6826080937, 0.4959435937, 0.84375),
    (1.125, 0.6683870917, 0.4856114355, 0.826171875),
    (1.1875, 0.707889875, 0.514311875, 0.875),
    (1.1875, 0.7236909882, 0.5257920507, 0.89453125),
    (1.1875, 0.6920887617, 0.5028316992, 0.85546875),
    (1.15625, 0.707889875, 0.514311875, 0.875),
    (1.15625, 0.6826080937, 0.4959435937, 0.84375),
    (1.125, 0.6573263125, 0.4775753125, 0.8125),
    (1.125, 0.6826080937, 0.4959435937, 0.84375),
    (1.125, 0.6968290957, 0.5062757519, 0.861328125),
    (1.125, 0.6320445312, 0.4592070312, 0.78125),
    (1.125, 0.6178235292, 0.448874873, 0.763671875),
    (1.1875, 0.6573263125, 0.4775753125, 0.8125),
    (1.1875, 0.6731274257, 0.4890554882, 0.83203125),
    (1.1875, 0.6415251992, 0.4660951367, 0.79296875),
    (1.15625, 0.6573263125, 0.4775753125, 0.8125),
    (1.15625, 0.6320445312, 0.4592070312, 0.78125),
    (1.0, 0.101127125, 0.073473125, 0.125),
    (1.0, 0.0758453437, 0.0551048437, 0.09375),
    (1.0, 0.0616243417, 0.0447726855, 0.076171875),
    (1.0, 0.1264089062, 0.0918414062, 0.15625),
    (1.0, 0.1406299082, 0.1021735644, 0.173828125),
    # NOTE(review): next row has y1/x1 swapped relative to its neighbours;
    # reproduced exactly as in the generated original.
    (0.94140625, 0.073473125, 0.101127125, 0.125),
    (0.96875, 0.101127125, 0.073473125, 0.125),
    (0.96875, 0.0758453437, 0.0551048437, 0.09375),
    (0.96875, 0.1264089062, 0.0918414062, 0.15625),
    (0.984375, 0.0884862343, 0.0642889843, 0.109375),
    (1.0, 0.1516906875, 0.1102096875, 0.1875),
    (1.0, 0.1264089062, 0.0918414062, 0.15625),
    (1.0, 0.1121879042, 0.081509248, 0.138671875),
    (1.0, 0.1769724687, 0.1285779687, 0.21875),
    (1.0, 0.1911934707, 0.1389101269, 0.236328125),
    (0.96875, 0.1516906875, 0.1102096875, 0.1875),
    (0.96875, 0.1264089062, 0.0918414062, 0.15625),
    (0.96875, 0.1769724687, 0.1285779687, 0.21875),
    (0.984375, 0.1390497968, 0.1010255468, 0.171875),
    (0.984375, 0.1643315781, 0.1193938281, 0.203125),
    (1.0, 0.20225425, 0.14694625, 0.25),
    (1.0, 0.1769724687, 0.1285779687, 0.21875),
    (1.0, 0.1627514667, 0.1182458105, 0.201171875),
    (1.0, 0.2275360312, 0.1653145312, 0.28125),
    (1.0, 0.2417570332, 0.1756466894, 0.298828125),
    (0.96875, 0.20225425, 0.14694625, 0.25),
    (0.96875, 0.1769724687, 0.1285779687, 0.21875),
    (0.96875, 0.2275360312, 0.1653145312, 0.28125),
    (0.984375, 0.1896133593, 0.1377621093, 0.234375),
    (0.984375, 0.2148951406, 0.1561303906, 0.265625),
    (1.0, 0.303381375, 0.220419375, 0.375),
    (1.0, 0.2780995937, 0.2020510937, 0.34375),
    (1.0, 0.2638785917, 0.1917189355, 0.326171875),
    (1.0, 0.3286631562, 0.2387876562, 0.40625),
    (1.0, 0.3428841582, 0.2491198144, 0.423828125),
    (0.96875, 0.303381375, 0.220419375, 0.375),
    (0.96875, 0.2780995937, 0.2020510937, 0.34375),
    (0.96875, 0.3286631562, 0.2387876562, 0.40625),
    (0.984375, 0.2907404843, 0.2112352343, 0.359375),
    (0.984375, 0.3160222656, 0.2296035156, 0.390625),
    (1.0, 0.2528178125, 0.1836828125, 0.3125),
    (1.0, 0.2275360312, 0.1653145312, 0.28125),
    (1.0, 0.2133150292, 0.154982373, 0.263671875),
    (1.0, 0.2780995937, 0.2020510937, 0.34375),
    (1.0, 0.2923205957, 0.2123832519, 0.361328125),
    (0.96875, 0.2528178125, 0.1836828125, 0.3125),
    (0.96875, 0.2275360312, 0.1653145312, 0.28125),
]:
    labels.append(1)
    xsampling = [x1**2, y1**2]
    data_points.append(xsampling)
theta1=0.96875
y1=0.2780995937
x1=0.2020510937
alpha1=0.34375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.984375
y1=0.2401769218
x1=0.1744986718
alpha1=0.296875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.984375
y1=0.2654587031
x1=0.1928669531
alpha1=0.328125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9375
y1=0.14694625
x1=0.20225425
alpha1=0.25
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9375
y1=0.1354660742
x1=0.1864531367
alpha1=0.23046875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9375
y1=0.1584264257
x1=0.2180553632
alpha1=0.26953125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.90625
y1=0.14694625
x1=0.20225425
alpha1=0.25
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.90625
y1=0.1285779687
x1=0.1769724687
alpha1=0.21875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.90625
y1=0.1653145312
x1=0.2275360312
alpha1=0.28125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.890625
y1=0.1377621093
x1=0.1896133593
alpha1=0.234375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.890625
y1=0.1193938281
x1=0.1643315781
alpha1=0.203125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.890625
y1=0.1561303906
x1=0.2148951406
alpha1=0.265625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.890625
y1=0.1744986718
x1=0.2401769218
alpha1=0.296875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9375
y1=0.073473125
x1=0.101127125
alpha1=0.125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9375
y1=0.0619929492
x1=0.0853260117
alpha1=0.10546875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9375
y1=0.0849533007
x1=0.1169282382
alpha1=0.14453125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.90625
y1=0.073473125
x1=0.101127125
alpha1=0.125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.90625
y1=0.0551048437
x1=0.0758453437
alpha1=0.09375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.90625
y1=0.0918414062
x1=0.1264089062
alpha1=0.15625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.890625
y1=0.0642889843
x1=0.0884862343
alpha1=0.109375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.890625
y1=0.0459207031
x1=0.0632044531
alpha1=0.078125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.890625
y1=0.0826572656
x1=0.1137680156
alpha1=0.140625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.890625
y1=0.1010255468
x1=0.1390497968
alpha1=0.171875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9375
y1=0.1102096875
x1=0.1516906875
alpha1=0.1875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9375
y1=0.0987295117
x1=0.1358895742
alpha1=0.16796875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9375
y1=0.1216898632
x1=0.1674918007
alpha1=0.20703125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.90625
y1=0.1102096875
x1=0.1516906875
alpha1=0.1875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.90625
y1=0.0918414062
x1=0.1264089062
alpha1=0.15625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.90625
y1=0.1285779687
x1=0.1769724687
alpha1=0.21875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.890625
y1=0.1010255468
x1=0.1390497968
alpha1=0.171875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.890625
y1=0.0826572656
x1=0.1137680156
alpha1=0.140625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.890625
y1=0.1193938281
x1=0.1643315781
alpha1=0.203125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.890625
y1=0.1377621093
x1=0.1896133593
alpha1=0.234375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9375
y1=0.1836828125
x1=0.2528178125
alpha1=0.3125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9375
y1=0.1722026367
x1=0.2370166992
alpha1=0.29296875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9375
y1=0.1951629882
x1=0.2686189257
alpha1=0.33203125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.90625
y1=0.1836828125
x1=0.2528178125
alpha1=0.3125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.90625
y1=0.1653145312
x1=0.2275360312
alpha1=0.28125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.90625
y1=0.2020510937
x1=0.2780995937
alpha1=0.34375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.890625
y1=0.1744986718
x1=0.2401769218
alpha1=0.296875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.890625
y1=0.1561303906
x1=0.2148951406
alpha1=0.265625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.890625
y1=0.1928669531
x1=0.2654587031
alpha1=0.328125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.890625
y1=0.2112352343
x1=0.2907404843
alpha1=0.359375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9375
y1=0.2132442651
x1=0.2935056791
alpha1=0.3627929686
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9375
y1=0.2020510937
x1=0.2780995937
alpha1=0.34375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9375
y1=0.2250114453
x1=0.3097018203
alpha1=0.3828125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.875
y1=0.213244265
x1=0.2935056791
alpha1=0.3627929686
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.875
y1=0.2020510937
x1=0.2780995937
alpha1=0.34375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.875
y1=0.1882748828
x1=0.2591382578
alpha1=0.3203125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.875
y1=0.2387876562
x1=0.3286631562
alpha1=0.40625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.875
y1=0.2250114453
x1=0.3097018203
alpha1=0.3828125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.90625
y1=0.2132442651
x1=0.2935056791
alpha1=0.3627929686
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.90625
y1=0.2020510937
x1=0.2780995937
alpha1=0.34375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.7837352187
x1=0.5694167187
alpha1=0.96875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.8026965546
x1=0.5831929296
alpha1=0.9921875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.7584534375
x1=0.5510484375
alpha1=0.9375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.7442324355
x1=0.5407162792
alpha1=0.919921875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.96875
y1=0.7584534375
x1=0.5510484375
alpha1=0.9375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.96875
y1=0.7837352187
x1=0.5694167187
alpha1=0.96875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.96875
y1=0.8026965546
x1=0.5831929296
alpha1=0.9921875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.953125
y1=0.7932158867
x1=0.5763048242
alpha1=0.98046875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.953125
y1=0.7710943281
x1=0.5602325781
alpha1=0.953125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.984375
y1=0.7710943281
x1=0.5602325781
alpha1=0.953125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.707889875
x1=0.514311875
alpha1=0.875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.7331716562
x1=0.5326801562
alpha1=0.90625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.7473926582
x1=0.5430123144
alpha1=0.923828125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.6826080937
x1=0.4959435937
alpha1=0.84375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.6683870917
x1=0.4856114355
alpha1=0.826171875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.96875
y1=0.707889875
x1=0.514311875
alpha1=0.875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.96875
y1=0.6826080937
x1=0.4959435937
alpha1=0.84375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.96875
y1=0.7331716562
x1=0.5326801562
alpha1=0.90625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.984375
y1=0.6952489843
x1=0.5051277343
alpha1=0.859375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.984375
y1=0.7205307656
x1=0.5234960156
alpha1=0.890625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.6573263125
x1=0.4775753125
alpha1=0.8125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.6826080937
x1=0.4959435937
alpha1=0.84375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.6968290957
x1=0.5062757519
alpha1=0.861328125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.6320445312
x1=0.4592070312
alpha1=0.78125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.6178235292
x1=0.448874873
alpha1=0.763671875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.96875
y1=0.6573263125
x1=0.4775753125
alpha1=0.8125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.96875
y1=0.6320445312
x1=0.4592070312
alpha1=0.78125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.96875
y1=0.6826080937
x1=0.4959435937
alpha1=0.84375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.984375
y1=0.6446854218
x1=0.4683911718
alpha1=0.796875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.984375
y1=0.6699672031
x1=0.4867594531
alpha1=0.828125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.7584534375
x1=0.5510484375
alpha1=0.9375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.7774147734
x1=0.5648246484
alpha1=0.9609375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.7331716562
x1=0.5326801562
alpha1=0.90625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.7142103203
x1=0.5189039453
alpha1=0.8828125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.953125
y1=0.707889875
x1=0.514311875
alpha1=0.875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.953125
y1=0.7584534375
x1=0.5510484375
alpha1=0.9375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.953125
y1=0.7331716562
x1=0.5326801562
alpha1=0.90625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.953125
y1=0.7837352187
x1=0.5694167187
alpha1=0.96875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9609375
y1=0.7205307656
x1=0.5234960156
alpha1=0.890625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9609375
y1=0.7458125468
x1=0.5418642968
alpha1=0.921875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.8082269443
x1=0.5872109912
alpha1=0.9990234375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.790055664
x1=0.574008789
alpha1=0.9765625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.96875
y1=0.8082269443
x1=0.5872109912
alpha1=0.9990234375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.96875
y1=0.7837352187
x1=0.5694167187
alpha1=0.96875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.96875
y1=0.7706993002
x1=0.5599455737
alpha1=0.9526367187
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9791124266
y1=0.7963761093
x1=0.5786008593
alpha1=0.984375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.98828125
y1=0.7774147734
x1=0.5648246484
alpha1=0.9609375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.953125
y1=0.7963761093
x1=0.5786008593
alpha1=0.984375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9453125
y1=0.8086219721
x1=0.5874979956
alpha1=0.9995117187
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9453125
y1=0.7837352187
x1=0.5694167187
alpha1=0.96875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.6826080937
x1=0.4959435937
alpha1=0.84375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.7015694296
x1=0.5097198046
alpha1=0.8671875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.6636467578
x1=0.4821673828
alpha1=0.8203125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.96875
y1=0.707889875
x1=0.514311875
alpha1=0.875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.96875
y1=0.6826080937
x1=0.4959435937
alpha1=0.84375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.96875
y1=0.6573263125
x1=0.4775753125
alpha1=0.8125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.984375
y1=0.6920887617
x1=0.5028316992
alpha1=0.85546875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.984375
y1=0.6731274257
x1=0.4890554882
alpha1=0.83203125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9892578125
y1=0.7130252368
x1=0.5180429321
alpha1=0.8813476562
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9892578125
y1=0.6521909506
x1=0.4738442553
alpha1=0.8061523437
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.125
y1=0.707889875
x1=0.514311875
alpha1=0.875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.125
y1=0.7331716562
x1=0.5326801562
alpha1=0.90625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.125
y1=0.7473926582
x1=0.5430123144
alpha1=0.923828125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.125
y1=0.6826080937
x1=0.4959435937
alpha1=0.84375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.125
y1=0.6683870917
x1=0.4856114355
alpha1=0.826171875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.1875
y1=0.707889875
x1=0.514311875
alpha1=0.875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.1875
y1=0.7236909882
x1=0.5257920507
alpha1=0.89453125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.1875
y1=0.6920887617
x1=0.5028316992
alpha1=0.85546875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.15625
y1=0.707889875
x1=0.514311875
alpha1=0.875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.15625
y1=0.6826080937
x1=0.4959435937
alpha1=0.84375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.125
y1=0.6573263125
x1=0.4775753125
alpha1=0.8125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.125
y1=0.6826080937
x1=0.4959435937
alpha1=0.84375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.125
y1=0.6968290957
x1=0.5062757519
alpha1=0.861328125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.125
y1=0.6320445312
x1=0.4592070312
alpha1=0.78125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.125
y1=0.6178235292
x1=0.448874873
alpha1=0.763671875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.1875
y1=0.6573263125
x1=0.4775753125
alpha1=0.8125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.1875
y1=0.6731274257
x1=0.4890554882
alpha1=0.83203125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.1875
y1=0.6415251992
x1=0.4660951367
alpha1=0.79296875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.15625
y1=0.6573263125
x1=0.4775753125
alpha1=0.8125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.15625
y1=0.6320445312
x1=0.4592070312
alpha1=0.78125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.125
y1=0.7837352187
x1=0.5694167187
alpha1=0.96875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.125
y1=0.7647738828
x1=0.5556405078
alpha1=0.9453125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.125
y1=0.7979562207
x1=0.5797488769
alpha1=0.986328125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.125
y1=0.7331716562
x1=0.5326801562
alpha1=0.90625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.125
y1=0.7489727695
x1=0.544160332
alpha1=0.92578125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.125
y1=0.7189506542
x1=0.522347998
alpha1=0.888671875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.1875
y1=0.7584534375
x1=0.5510484375
alpha1=0.9375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.1875
y1=0.7426523242
x1=0.5395682617
alpha1=0.91796875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.1875
y1=0.7742545507
x1=0.5625286132
alpha1=0.95703125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.15625
y1=0.7584534375
x1=0.5510484375
alpha1=0.9375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.125
y1=0.8026965546
x1=0.5831929296
alpha1=0.9921875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.125
y1=0.7837352187
x1=0.5694167187
alpha1=0.96875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.125
y1=0.7647738828
x1=0.5556405078
alpha1=0.9453125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.1875
y1=0.7963761093
x1=0.5786008593
alpha1=0.984375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.191159091
y1=0.8090046553
x1=0.5877760311
alpha1=0.9999847412
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.15625
y1=0.7837352187
x1=0.5694167187
alpha1=0.96875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.15625
y1=0.8026965546
x1=0.5831929296
alpha1=0.9921875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.15625
y1=0.7679341054
x1=0.5579365429
alpha1=0.94921875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.140625
y1=0.7932158867
x1=0.5763048242
alpha1=0.98046875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.1796875
y1=0.7837352187
x1=0.5694167187
alpha1=0.96875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.0126408906
x1=0.0091841406
alpha1=0.015625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.0505635625
x1=0.0367365625
alpha1=0.0625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.0316022265
x1=0.0229603515
alpha1=0.0390625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.0647845644
x1=0.0470687207
alpha1=0.080078125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9375
y1=0.0183682812
x1=0.0252817812
alpha1=0.03125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9375
y1=0.0068881054
x1=0.0094806679
alpha1=0.01171875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9375
y1=0.029848457
x1=0.0410828945
alpha1=0.05078125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.96875
y1=0.0505635625
x1=0.0367365625
alpha1=0.0625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.96875
y1=0.0252817812
x1=0.0183682812
alpha1=0.03125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.96875
y1=0.0063204453
x1=0.0045920703
alpha1=0.0078125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.101127125
x1=0.073473125
alpha1=0.125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.0758453437
x1=0.0551048437
alpha1=0.09375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.0616243417
x1=0.0447726855
alpha1=0.076171875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.1264089062
x1=0.0918414062
alpha1=0.15625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.1406299082
x1=0.1021735644
alpha1=0.173828125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.94140625
y1=0.073473125
x1=0.101127125
alpha1=0.125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.96875
y1=0.101127125
x1=0.073473125
alpha1=0.125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.96875
y1=0.0758453437
x1=0.0551048437
alpha1=0.09375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.96875
y1=0.1264089062
x1=0.0918414062
alpha1=0.15625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.984375
y1=0.0884862343
x1=0.0642889843
alpha1=0.109375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.1516906875
x1=0.1102096875
alpha1=0.1875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.1264089062
x1=0.0918414062
alpha1=0.15625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.1121879042
x1=0.081509248
alpha1=0.138671875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.1769724687
x1=0.1285779687
alpha1=0.21875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=1.0
y1=0.1911934707
x1=0.1389101269
alpha1=0.236328125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.96875
y1=0.1516906875
x1=0.1102096875
alpha1=0.1875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.96875
y1=0.1264089062
x1=0.0918414062
alpha1=0.15625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.96875
y1=0.1769724687
x1=0.1285779687
alpha1=0.21875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.984375
y1=0.1390497968
x1=0.1010255468
alpha1=0.171875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.984375
y1=0.1643315781
x1=0.1193938281
alpha1=0.203125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9375
y1=0.0367365625
x1=0.0505635625
alpha1=0.0625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9375
y1=0.0252563867
x1=0.0347624492
alpha1=0.04296875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9375
y1=0.0482167382
x1=0.0663646757
alpha1=0.08203125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.90625
y1=0.0367365625
x1=0.0505635625
alpha1=0.0625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.90625
y1=0.0183682812
x1=0.0252817812
alpha1=0.03125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.90625
y1=0.0551048437
x1=0.0758453437
alpha1=0.09375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.890625
y1=0.0275524218
x1=0.0379226718
alpha1=0.046875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.890625
y1=0.0091841406
x1=0.0126408906
alpha1=0.015625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.890625
y1=0.0459207031
x1=0.0632044531
alpha1=0.078125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.890625
y1=0.0642889843
x1=0.0884862343
alpha1=0.109375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9375
y1=0.0022960351
x1=0.0031602226
alpha1=0.00390625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9375
y1=0.0137762109
x1=0.0189613359
alpha1=0.0234375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.90625
y1=0.0022960351
x1=0.0031602226
alpha1=0.00390625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.90625
y1=0.0183682812
x1=0.0252817812
alpha1=0.03125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.90625
y1=0.029848457
x1=0.0410828945
alpha1=0.05078125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.890625
y1=0.0091841406
x1=0.0126408906
alpha1=0.015625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.8828125
y1=2.870043E-4
x1=3.950278E-4
alpha1=4.882812E-4
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.8828125
y1=0.0183682812
x1=0.0252817812
alpha1=0.03125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.8828125
y1=0.0321444921
x1=0.0442431171
alpha1=0.0546875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9296875
y1=0.0229603515
x1=0.0316022265
alpha1=0.0390625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9375
y1=0.073473125
x1=0.101127125
alpha1=0.125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9375
y1=0.0619929492
x1=0.0853260117
alpha1=0.10546875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.9375
y1=0.0849533007
x1=0.1169282382
alpha1=0.14453125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.90625
y1=0.073473125
x1=0.101127125
alpha1=0.125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.90625
y1=0.0551048437
x1=0.0758453437
alpha1=0.09375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.90625
y1=0.0918414062
x1=0.1264089062
alpha1=0.15625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.890625
y1=0.0642889843
x1=0.0884862343
alpha1=0.109375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.890625
y1=0.0459207031
x1=0.0632044531
alpha1=0.078125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
# Auto-generated sample dump, refactored from 274 unrolled 7-line groups
# into a data table plus one loop.  Each tuple below is one sample's
# (x1, y1).  The original code also assigned theta1/alpha1 for every
# sample, but those values never feed the emitted features — only x1 and
# y1 do — so the loop carries just the coordinates and the final
# theta1/alpha1 bindings from the last group are restored afterwards,
# leaving module state identical to the unrolled version.
_xy_samples = [
    (0.1137680156, 0.0826572656),
    (0.1390497968, 0.1010255468),
    (0.1516906875, 0.1102096875),
    (0.1358895742, 0.0987295117),
    (0.1674918007, 0.1216898632),
    (0.1516906875, 0.1102096875),
    (0.1264089062, 0.0918414062),
    (0.1769724687, 0.1285779687),
    (0.1390497968, 0.1010255468),
    (0.1137680156, 0.0826572656),
    (0.1643315781, 0.1193938281),
    (0.1896133593, 0.1377621093),
    (0.0482167382, 0.0663646757),
    (0.059696914, 0.082165789),
    (0.0505635625, 0.0367365625),
    (0.0695248984, 0.0505127734),
    (0.0363425605, 0.0264044042),
    (0.101127125, 0.073473125),
    (0.0853260117, 0.0619929492),
    (0.1153481269, 0.0838052832),
    (0.0758453437, 0.0551048437),
    (0.0600442304, 0.0436246679),
    (0.101127125, 0.073473125),
    (0.1200884609, 0.0872493359),
    (0.086906123, 0.0631409667),
    (0.1516906875, 0.1102096875),
    (0.1358895742, 0.0987295117),
    (0.1659116894, 0.1205418457),
    (0.1264089062, 0.0918414062),
    (0.1106077929, 0.0803612304),
    (0.1422100195, 0.103321582),
    (0.101127125, 0.073473125),
    (0.1706520234, 0.1239858984),
    (0.1516906875, 0.1102096875),
    (0.1327293515, 0.0964334765),
    (0.20225425, 0.14694625),
    (0.1864531367, 0.1354660742),
    (0.1769724687, 0.1285779687),
    (0.1580111328, 0.1148017578),
    (0.20225425, 0.14694625),
    (0.1516906875, 0.1102096875),
    (0.1358895742, 0.0987295117),
    (0.4755285, 0.1545085),
    (0.4458079687, 0.1448517187),
    (0.4290901699, 0.1394197792),
    (0.5052490312, 0.1641652812),
    (0.52196683, 0.1695972207),
    (0.4755285, 0.1545085),
    (0.494103832, 0.1605439882),
    (0.4569531679, 0.1484730117),
    (0.4755285, 0.1545085),
    (0.4458079687, 0.1448517187),
    (0.356646375, 0.115881375),
    (0.3269258437, 0.1062245937),
    (0.3102080449, 0.1007926542),
    (0.3863669062, 0.1255381562),
    (0.403084705, 0.1309700957),
    (0.356646375, 0.115881375),
    (0.3380710429, 0.1098458867),
    (0.375221707, 0.1219168632),
    (0.356646375, 0.115881375),
    (0.3863669062, 0.1255381562),
    (0.4160874375, 0.1351949375),
    (0.3863669062, 0.1255381562),
    (0.3696491074, 0.1201062167),
    (0.4458079687, 0.1448517187),
    (0.4625257675, 0.1502836582),
    (0.4160874375, 0.1351949375),
    (0.3975121054, 0.1291594492),
    (0.4346627695, 0.1412304257),
    (0.4160874375, 0.1351949375),
    (0.4458079687, 0.1448517187),
    (0.594410625, 0.193135625),
    (0.6241311562, 0.2027924062),
    (0.640848955, 0.2082243457),
    (0.5646900937, 0.1834788437),
    (0.5479722949, 0.1780469042),
    (0.594410625, 0.193135625),
    (0.5758352929, 0.1871001367),
    (0.612985957, 0.1991711132),
    (0.594410625, 0.193135625),
    (0.6241311562, 0.2027924062),
    (0.5349695625, 0.1738220625),
    (0.5646900937, 0.1834788437),
    (0.5814078925, 0.1889107832),
    (0.5052490312, 0.1641652812),
    (0.4885312324, 0.1587333417),
    (0.5349695625, 0.1738220625),
    (0.5163942304, 0.1677865742),
    (0.5535448945, 0.1798575507),
    (0.5349695625, 0.1738220625),
    (0.5646900937, 0.1834788437),
    (0.4045085, 0.2938925),
    (0.3792267187, 0.2755242187),
    (0.3657957724, 0.2657660693),
    (0.4297902812, 0.3122607812),
    (0.4432212275, 0.3220189306),
    (0.3918676093, 0.2847083593),
    (0.4171493906, 0.3030766406),
    (0.3918676093, 0.2847083593),
    (0.4171493906, 0.3030766406),
    (0.4045085, 0.2938925),
    (0.3552469347, 0.2581018934),
    (0.3286631562, 0.2387876562),
    (0.3420941025, 0.2485458056),
    (0.3152322099, 0.2290295068),
    (0.3792267187, 0.2755242187),
    (0.395027832, 0.2870043945),
    (0.3672370068, 0.2668131869),
    (0.3539449375, 0.2571559375),
    (0.3792267187, 0.2755242187),
    (0.3286631562, 0.2387876562),
    (0.4045085, 0.2938925),
    (0.4424311718, 0.3214449218),
    (0.4234698359, 0.3076687109),
    (0.4803538437, 0.3489973437),
    (0.4613925078, 0.3352211328),
    (0.4424311718, 0.3214449218),
    (0.4613925078, 0.3352211328),
    (0.4234698359, 0.3076687109),
    (0.4424311718, 0.3214449218),
    (0.4677129531, 0.3398132031),
    (0.4755285, 0.1545085),
    (0.4458079687, 0.1448517187),
    (0.5052490312, 0.1641652812),
    (0.4755285, 0.1545085),
    (0.5052490312, 0.1641652812),
    (0.52196683, 0.1695972207),
    (0.4458079687, 0.1448517187),
    (0.4290901699, 0.1394197792),
    (0.4606682343, 0.1496801093),
    (0.4903887656, 0.1593368906),
    (0.594410625, 0.193135625),
    (0.6241311562, 0.2027924062),
    (0.5646900937, 0.1834788437),
    (0.594410625, 0.193135625),
    (0.6241311562, 0.2027924062),
    (0.640848955, 0.2082243457),
    (0.5646900937, 0.1834788437),
    (0.5479722949, 0.1780469042),
    (0.6092708906, 0.1979640156),
    (0.5795503593, 0.1883072343),
    (0.118882125, 0.038627125),
    (0.0891615937, 0.0289703437),
    (0.0724437949, 0.0235384042),
    (0.1486026562, 0.0482839062),
    (0.165320455, 0.0537158457),
    (0.118882125, 0.038627125),
    (0.1003067929, 0.0325916367),
    (0.137457457, 0.0446626132),
    (0.118882125, 0.038627125),
    (0.1486026562, 0.0482839062),
    (0.1783231875, 0.0579406875),
    (0.1486026562, 0.0482839062),
    (0.1318848574, 0.0428519667),
    (0.2080437187, 0.0675974687),
    (0.2247615175, 0.0730294082),
    (0.1783231875, 0.0579406875),
    (0.1597478554, 0.0519051992),
    (0.1968985195, 0.0639761757),
    (0.1783231875, 0.0579406875),
    (0.2080437187, 0.0675974687),
    (0.23776425, 0.07725425),
    (0.2080437187, 0.0675974687),
    (0.1913259199, 0.0621655292),
    (0.2674847812, 0.0869110312),
    (0.28420258, 0.0923429707),
    (0.23776425, 0.07725425),
    (0.2191889179, 0.0712187617),
    (0.256339582, 0.0832897382),
    (0.23776425, 0.07725425),
    (0.2674847812, 0.0869110312),
    (0.356646375, 0.115881375),
    (0.3269258437, 0.1062245937),
    (0.3102080449, 0.1007926542),
    (0.3863669062, 0.1255381562),
    (0.403084705, 0.1309700957),
    (0.356646375, 0.115881375),
    (0.3380710429, 0.1098458867),
    (0.375221707, 0.1219168632),
    (0.356646375, 0.115881375),
    (0.3863669062, 0.1255381562),
    (0.2972053125, 0.0965678125),
    (0.2674847812, 0.0869110312),
    (0.2507669824, 0.0814790917),
    (0.3269258437, 0.1062245937),
    (0.3436436425, 0.1116565332),
    (0.2972053125, 0.0965678125),
    (0.2786299804, 0.0905323242),
    (0.3157806445, 0.1026033007),
    (0.2972053125, 0.0965678125),
    (0.3269258437, 0.1062245937),
    (0.25, 0.0),
    (0.23046875, 0.0),
    (0.26953125, 0.0),
    (0.25, 0.0),
    (0.21875, 0.0),
    (0.201171875, 0.0),
    (0.28125, 0.0),
    (0.298828125, 0.0),
    (0.25, 0.0),
    (0.21875, 0.0),
    (0.1875, 0.0),
    (0.16796875, 0.0),
    (0.20703125, 0.0),
    (0.1875, 0.0),
    (0.15625, 0.0),
    (0.138671875, 0.0),
    (0.21875, 0.0),
    (0.236328125, 0.0),
    (0.1875, 0.0),
    (0.15625, 0.0),
    (0.3046875, 0.0),
    (0.28515625, 0.0),
    (0.32421875, 0.0),
    (0.3046875, 0.0),
    (0.28125, 0.0),
    (0.2578125, 0.0),
    (0.34375, 0.0),
    (0.32421875, 0.0),
    (0.3046875, 0.0),
    (0.28125, 0.0),
    (0.1776234673, 0.1290509467),
    (0.1643315781, 0.1193938281),
    (0.1959338046, 0.1423541796),
    (0.1776234673, 0.1290509467),
    (0.1516906875, 0.1102096875),
    (0.1647266059, 0.1196808325),
    (0.20225425, 0.14694625),
    (0.1896133593, 0.1377621093),
    (0.1776234673, 0.1290509467),
    (0.1643315781, 0.1193938281),
    (0.2275360312, 0.1653145312),
    (0.2117349179, 0.1538343554),
    (0.2433371445, 0.176794707),
    (0.20225425, 0.14694625),
    (0.2528178125, 0.1836828125),
    (0.188033248, 0.1366140917),
    (0.2275360312, 0.1653145312),
    (0.2670388144, 0.1940149707),
    (0.20225425, 0.14694625),
    (0.2528178125, 0.1836828125),
    (0.0148602656, 0.0048283906),
    (0.0594410625, 0.0193135625),
    (0.037150664, 0.0120709765),
    (0.0761588613, 0.0247455019),
    (0.0297205312, 0.0096567812),
    (0.0111451992, 0.0036212929),
    (0.0482958632, 0.0156922695),
    (0.0594410625, 0.0193135625),
    (0.0297205312, 0.0096567812),
    (0.0074301328, 0.0024141953),
    (0.118882125, 0.038627125),
    (0.0891615937, 0.0289703437),
    (0.0724437949, 0.0235384042),
    (0.1486026562, 0.0482839062),
    (0.165320455, 0.0537158457),
    (0.118882125, 0.038627125),
    (0.1003067929, 0.0325916367),
    (0.137457457, 0.0446626132),
    (0.118882125, 0.038627125),
    (0.1486026562, 0.0482839062),
    (0.1783231875, 0.0579406875),
    (0.1486026562, 0.0482839062),
    (0.1318848574, 0.0428519667),
    (0.2080437187, 0.0675974687),
    (0.2247615175, 0.0730294082),
    (0.1783231875, 0.0579406875),
    (0.1597478554, 0.0519051992),
    (0.1968985195, 0.0639761757),
    (0.1783231875, 0.0579406875),
    (0.2080437187, 0.0675974687),
    (0.015625, 0.0),
    (0.046875, 0.0),
]
for x1, y1 in _xy_samples:
    labels.append(1)  # every sample in this chunk is class 1
    xsampling = [x1**2, y1**2]  # squared-coordinate feature vector
    data_points.append(xsampling)
# Restore the trailing bindings left by the last unrolled group so any
# later code that reads theta1/alpha1 sees the same final values.
theta1 = 0.25
alpha1 = 0.046875
theta1=0.3134765625
y1=0.0
x1=0.015625
alpha1=0.015625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.3134765625
y1=0.0
x1=0.0625
alpha1=0.0625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.3134765625
y1=0.0
x1=0.080078125
alpha1=0.080078125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.3134765625
y1=0.0
x1=0.0390625
alpha1=0.0390625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.28125
y1=0.0
x1=0.015625
alpha1=0.015625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.28125
y1=0.0
x1=0.0625
alpha1=0.0625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.28125
y1=0.0
x1=0.0390625
alpha1=0.0390625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.2421875
y1=0.0
x1=0.03125
alpha1=0.03125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.25
y1=0.0
x1=0.125
alpha1=0.125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.25
y1=0.0
x1=0.10546875
alpha1=0.10546875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.25
y1=0.0
x1=0.14453125
alpha1=0.14453125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.3134765625
y1=0.0
x1=0.125
alpha1=0.125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.3134765625
y1=0.0
x1=0.09375
alpha1=0.09375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.3134765625
y1=0.0
x1=0.076171875
alpha1=0.076171875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.3134765625
y1=0.0
x1=0.15625
alpha1=0.15625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.3134765625
y1=0.0
x1=0.173828125
alpha1=0.173828125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.28125
y1=0.0
x1=0.125
alpha1=0.125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.28125
y1=0.0
x1=0.09375
alpha1=0.09375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.25
y1=0.0
x1=0.0625
alpha1=0.0625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.25
y1=0.0
x1=0.0390625
alpha1=0.0390625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.25
y1=0.0
x1=0.09375
alpha1=0.09375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.25
y1=0.0
x1=0.1171875
alpha1=0.1171875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.3125
y1=0.0
x1=0.0625
alpha1=0.0625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.3125
y1=0.0
x1=0.0390625
alpha1=0.0390625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.3125
y1=0.0
x1=0.09375
alpha1=0.09375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.3125
y1=0.0
x1=0.1171875
alpha1=0.1171875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.28125
y1=0.0
x1=0.0625
alpha1=0.0625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.28125
y1=0.0
x1=0.03125
alpha1=0.03125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.75
y1=0.0091841406
x1=0.0126408906
alpha1=0.015625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.75
y1=0.0275524218
x1=0.0379226718
alpha1=0.046875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.65625
y1=0.0091841406
x1=0.0126408906
alpha1=0.015625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.65625
y1=0.0367365625
x1=0.0505635625
alpha1=0.0625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.65625
y1=0.0229603515
x1=0.0316022265
alpha1=0.0390625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.671875
y1=0.0022960351
x1=0.0031602226
alpha1=0.00390625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.671875
y1=0.016072246
x1=0.0221215585
alpha1=0.02734375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.671875
y1=0.029848457
x1=0.0410828945
alpha1=0.05078125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.671875
y1=0.0459207031
x1=0.0632044531
alpha1=0.078125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.640625
y1=0.0022960351
x1=0.0031602226
alpha1=0.00390625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.75
y1=0.073473125
x1=0.101127125
alpha1=0.125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.75
y1=0.0619929492
x1=0.0853260117
alpha1=0.10546875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.75
y1=0.0849533007
x1=0.1169282382
alpha1=0.14453125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.6875
y1=0.073473125
x1=0.101127125
alpha1=0.125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.6875
y1=0.0918414062
x1=0.1264089062
alpha1=0.15625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.6875
y1=0.0551048437
x1=0.0758453437
alpha1=0.09375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.6875
y1=0.1021735644
x1=0.1406299082
alpha1=0.173828125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.6875
y1=0.0447726855
x1=0.0616243417
alpha1=0.076171875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.65625
y1=0.073473125
x1=0.101127125
alpha1=0.125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.65625
y1=0.0551048437
x1=0.0758453437
alpha1=0.09375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.75
y1=0.0367365625
x1=0.0505635625
alpha1=0.0625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.75
y1=0.0229603515
x1=0.0316022265
alpha1=0.0390625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.75
y1=0.0551048437
x1=0.0758453437
alpha1=0.09375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.75
y1=0.0688810546
x1=0.0948066796
alpha1=0.1171875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.6875
y1=0.0367365625
x1=0.0505635625
alpha1=0.0625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.6875
y1=0.0229603515
x1=0.0316022265
alpha1=0.0390625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.6875
y1=0.0551048437
x1=0.0758453437
alpha1=0.09375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.6875
y1=0.0688810546
x1=0.0948066796
alpha1=0.1171875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.703125
y1=0.029848457
x1=0.0410828945
alpha1=0.05078125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.703125
y1=0.0459207031
x1=0.0632044531
alpha1=0.078125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.640625
y1=0.073473125
x1=0.101127125
alpha1=0.125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.640625
y1=0.0918414062
x1=0.1264089062
alpha1=0.15625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.640625
y1=0.1102096875
x1=0.1516906875
alpha1=0.1875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.640625
y1=0.1285779687
x1=0.1769724687
alpha1=0.21875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.65625
y1=0.1010255468
x1=0.1390497968
alpha1=0.1718749999
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.65625
y1=0.0826572656
x1=0.1137680156
alpha1=0.140625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.65625
y1=0.1193938281
x1=0.1643315781
alpha1=0.203125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.6328125
y1=0.1010255468
x1=0.1390497968
alpha1=0.1718749999
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.6328125
y1=0.0826572656
x1=0.1137680156
alpha1=0.140625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.6328125
y1=0.1193938281
x1=0.1643315781
alpha1=0.203125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5
y1=0.270389875
x1=0.832174875
alpha1=0.875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5
y1=0.2800466562
x1=0.8618954062
alpha1=0.90625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5
y1=0.2854785957
x1=0.878613205
alpha1=0.923828125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5
y1=0.2607330937
x1=0.8024543437
alpha1=0.84375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5
y1=0.2553011542
x1=0.7857365449
alpha1=0.826171875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.4375
y1=0.270389875
x1=0.832174875
alpha1=0.875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.4375
y1=0.2643543867
x1=0.8135995429
alpha1=0.85546875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.4375
y1=0.2764253632
x1=0.850750207
alpha1=0.89453125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.46875
y1=0.270389875
x1=0.832174875
alpha1=0.875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.46875
y1=0.2800466562
x1=0.8618954062
alpha1=0.90625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5
y1=0.23176275
x1=0.71329275
alpha1=0.75
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5
y1=0.2414195312
x1=0.7430132812
alpha1=0.78125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5
y1=0.2468514707
x1=0.75973108
alpha1=0.798828125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5
y1=0.2221059687
x1=0.6835722187
alpha1=0.71875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5
y1=0.2166740292
x1=0.6668544199
alpha1=0.701171875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.4375
y1=0.23176275
x1=0.71329275
alpha1=0.75
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.4375
y1=0.2257272617
x1=0.6947174179
alpha1=0.73046875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.4375
y1=0.2377982382
x1=0.731868082
alpha1=0.76953125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.46875
y1=0.23176275
x1=0.71329275
alpha1=0.75
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.46875
y1=0.2414195312
x1=0.7430132812
alpha1=0.78125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5
y1=0.2510763125
x1=0.7727338125
alpha1=0.8125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5
y1=0.2607330937
x1=0.8024543437
alpha1=0.84375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5
y1=0.2661650332
x1=0.8191721425
alpha1=0.861328125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5
y1=0.2414195312
x1=0.7430132812
alpha1=0.78125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5
y1=0.2359875917
x1=0.7262954824
alpha1=0.763671875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.4375
y1=0.2510763125
x1=0.7727338125
alpha1=0.8125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.4375
y1=0.2450408242
x1=0.7541584804
alpha1=0.79296875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.4375
y1=0.2571118007
x1=0.7913091445
alpha1=0.83203125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.46875
y1=0.2510763125
x1=0.7727338125
alpha1=0.8125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.46875
y1=0.2607330937
x1=0.8024543437
alpha1=0.84375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5
y1=0.193135625
x1=0.594410625
alpha1=0.625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5
y1=0.2027924062
x1=0.6241311562
alpha1=0.65625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5
y1=0.2082243457
x1=0.640848955
alpha1=0.673828125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5
y1=0.1834788437
x1=0.5646900937
alpha1=0.59375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5
y1=0.1780469042
x1=0.5479722949
alpha1=0.576171875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.4375
y1=0.193135625
x1=0.594410625
alpha1=0.625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.4375
y1=0.1871001367
x1=0.5758352929
alpha1=0.60546875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.4375
y1=0.1991711132
x1=0.612985957
alpha1=0.64453125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.46875
y1=0.193135625
x1=0.594410625
alpha1=0.625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.46875
y1=0.2027924062
x1=0.6241311562
alpha1=0.65625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5
y1=0.2124491875
x1=0.6538516875
alpha1=0.6875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5
y1=0.2221059687
x1=0.6835722187
alpha1=0.71875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5
y1=0.2275379082
x1=0.7002900175
alpha1=0.736328125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5
y1=0.2027924062
x1=0.6241311562
alpha1=0.65625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5
y1=0.1973604667
x1=0.6074133574
alpha1=0.638671875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.4375
y1=0.2124491875
x1=0.6538516875
alpha1=0.6875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.4375
y1=0.2064136992
x1=0.6352763554
alpha1=0.66796875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.4375
y1=0.2184846757
x1=0.6724270195
alpha1=0.70703125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.46875
y1=0.2124491875
x1=0.6538516875
alpha1=0.6875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.46875
y1=0.2221059687
x1=0.6835722187
alpha1=0.71875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.626953125
y1=0.270389875
x1=0.832174875
alpha1=0.875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.626953125
y1=0.2800466562
x1=0.8618954062
alpha1=0.90625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.626953125
y1=0.2854785957
x1=0.878613205
alpha1=0.923828125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.626953125
y1=0.2607330937
x1=0.8024543437
alpha1=0.84375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.626953125
y1=0.2553011542
x1=0.7857365449
alpha1=0.826171875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5625
y1=0.270389875
x1=0.832174875
alpha1=0.875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5625
y1=0.2643543867
x1=0.8135995429
alpha1=0.85546875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5625
y1=0.2764253632
x1=0.850750207
alpha1=0.89453125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.59375
y1=0.270389875
x1=0.832174875
alpha1=0.875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.59375
y1=0.2800466562
x1=0.8618954062
alpha1=0.90625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.626953125
y1=0.2510763125
x1=0.7727338125
alpha1=0.8125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.626953125
y1=0.2607330937
x1=0.8024543437
alpha1=0.84375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.626953125
y1=0.2661650332
x1=0.8191721425
alpha1=0.861328125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.626953125
y1=0.2414195312
x1=0.7430132812
alpha1=0.78125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.626953125
y1=0.2359875917
x1=0.7262954824
alpha1=0.763671875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5625
y1=0.2510763125
x1=0.7727338125
alpha1=0.8125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5625
y1=0.2450408242
x1=0.7541584804
alpha1=0.79296875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5625
y1=0.2571118007
x1=0.7913091445
alpha1=0.83203125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.59375
y1=0.2510763125
x1=0.7727338125
alpha1=0.8125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.59375
y1=0.2607330937
x1=0.8024543437
alpha1=0.84375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.626953125
y1=0.2341769453
x1=0.7207228828
alpha1=0.7578124999
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.626953125
y1=0.2414195312
x1=0.7430132812
alpha1=0.78125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.626953125
y1=0.2486621171
x1=0.7653036796
alpha1=0.8046875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.626953125
y1=0.2221059687
x1=0.6835722187
alpha1=0.71875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.626953125
y1=0.228141457
x1=0.7021475507
alpha1=0.73828125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5625
y1=0.2341769453
x1=0.7207228828
alpha1=0.7578124999
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5625
y1=0.2402124335
x1=0.7392982148
alpha1=0.77734375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5625
y1=0.228141457
x1=0.7021475507
alpha1=0.73828125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.59375
y1=0.2341769453
x1=0.7207228828
alpha1=0.7578124999
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.59375
y1=0.2414195312
x1=0.7430132812
alpha1=0.78125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.626953125
y1=0.193135625
x1=0.594410625
alpha1=0.625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.626953125
y1=0.2027924062
x1=0.6241311562
alpha1=0.65625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.626953125
y1=0.2082243457
x1=0.640848955
alpha1=0.673828125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.626953125
y1=0.1834788437
x1=0.5646900937
alpha1=0.59375
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.626953125
y1=0.1780469042
x1=0.5479722949
alpha1=0.576171875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5625
y1=0.193135625
x1=0.594410625
alpha1=0.625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5625
y1=0.1871001367
x1=0.5758352929
alpha1=0.60546875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5625
y1=0.1991711132
x1=0.612985957
alpha1=0.64453125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.59375
y1=0.193135625
x1=0.594410625
alpha1=0.625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.59375
y1=0.2027924062
x1=0.6241311562
alpha1=0.65625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.626953125
y1=0.2124491875
x1=0.6538516875
alpha1=0.6875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.626953125
y1=0.2221059687
x1=0.6835722187
alpha1=0.71875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.626953125
y1=0.2275379082
x1=0.7002900175
alpha1=0.736328125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.626953125
y1=0.2027924062
x1=0.6241311562
alpha1=0.65625
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.626953125
y1=0.1973604667
x1=0.6074133574
alpha1=0.638671875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5625
y1=0.2124491875
x1=0.6538516875
alpha1=0.6875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5625
y1=0.2064136992
x1=0.6352763554
alpha1=0.66796875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.5625
y1=0.2184846757
x1=0.6724270195
alpha1=0.70703125
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.59375
y1=0.2124491875
x1=0.6538516875
alpha1=0.6875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=0.59375
y1=0.2221059687
x1=0.6835722187
alpha1=0.71875
labels.append(1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.038627125
x1=1.0
alpha1=0.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.038627125
x1=0.9375
alpha1=0.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.038627125
x1=0.96875
alpha1=0.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.038627125
x1=1.0625
alpha1=0.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.038627125
x1=1.03125
alpha1=0.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0579406875
x1=1.0
alpha1=0.1875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0579406875
x1=0.9765625
alpha1=0.1875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0579406875
x1=1.0234375
alpha1=0.1875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0482839062
x1=1.0
alpha1=0.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0482839062
x1=0.9375
alpha1=0.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.038627125
x1=0.875
alpha1=0.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.038627125
x1=0.8125
alpha1=0.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.038627125
x1=0.84375
alpha1=0.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.038627125
x1=0.9375
alpha1=0.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.038627125
x1=0.90625
alpha1=0.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0579406875
x1=0.875
alpha1=0.1875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0579406875
x1=0.8515625
alpha1=0.1875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0579406875
x1=0.8984375
alpha1=0.1875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0482839062
x1=0.875
alpha1=0.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0482839062
x1=0.8125
alpha1=0.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.038627125
x1=1.125
alpha1=0.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.038627125
x1=1.0625
alpha1=0.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.038627125
x1=1.09375
alpha1=0.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.038627125
x1=1.1875
alpha1=0.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.038627125
x1=1.15625
alpha1=0.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0579406875
x1=1.125
alpha1=0.1875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0579406875
x1=1.1015625
alpha1=0.1875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0579406875
x1=1.1484375
alpha1=0.1875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0482839062
x1=1.125
alpha1=0.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0482839062
x1=1.0625
alpha1=0.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0579406875
x1=1.0
alpha1=0.1875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0579406875
x1=0.875
alpha1=0.1875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0579406875
x1=0.9375
alpha1=0.1875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0579406875
x1=0.90625
alpha1=0.1875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0579406875
x1=0.96875
alpha1=0.1875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.07725425
x1=0.9375
alpha1=0.25
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.07725425
x1=0.9140625
alpha1=0.25
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.07725425
x1=0.9609375
alpha1=0.25
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0675974687
x1=1.0
alpha1=0.21875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0675974687
x1=0.875
alpha1=0.21875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0579406875
x1=1.0
alpha1=0.1875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0579406875
x1=1.125
alpha1=0.1875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0579406875
x1=1.0625
alpha1=0.1875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0579406875
x1=1.03125
alpha1=0.1875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0579406875
x1=1.09375
alpha1=0.1875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.07725425
x1=1.0625
alpha1=0.25
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.07725425
x1=1.0390625
alpha1=0.25
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.07725425
x1=1.0859375
alpha1=0.25
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0675974687
x1=1.0
alpha1=0.21875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0675974687
x1=1.125
alpha1=0.21875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0690983018
x1=1.0
alpha1=0.2236067977
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0690983018
x1=0.9375
alpha1=0.2236067977
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0690983018
x1=0.96875
alpha1=0.2236067977
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0690983018
x1=1.0625
alpha1=0.2236067977
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0690983018
x1=1.03125
alpha1=0.2236067977
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.07725425
x1=1.0
alpha1=0.25
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.07725425
x1=0.9375
alpha1=0.25
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.07725425
x1=0.96875
alpha1=0.25
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.07725425
x1=1.0625
alpha1=0.25
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.07725425
x1=1.03125
alpha1=0.25
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0193135625
x1=1.0
alpha1=0.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0193135625
x1=1.125
alpha1=0.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0193135625
x1=1.0625
alpha1=0.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0193135625
x1=1.09375
alpha1=0.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0193135625
x1=1.03125
alpha1=0.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.038627125
x1=1.0625
alpha1=0.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.038627125
x1=1.0859375
alpha1=0.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.038627125
x1=1.0390625
alpha1=0.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0289703437
x1=1.0
alpha1=0.09375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0289703437
x1=1.125
alpha1=0.09375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0193135625
x1=0.75
alpha1=0.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0193135625
x1=0.875
alpha1=0.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0193135625
x1=0.8125
alpha1=0.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0193135625
x1=0.84375
alpha1=0.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0193135625
x1=0.78125
alpha1=0.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.038627125
x1=0.8125
alpha1=0.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.038627125
x1=0.8359375
alpha1=0.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.038627125
x1=0.7890625
alpha1=0.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0289703437
x1=0.75
alpha1=0.09375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0289703437
x1=0.875
alpha1=0.09375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0193135625
x1=1.0
alpha1=0.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0193135625
x1=0.875
alpha1=0.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0193135625
x1=0.9375
alpha1=0.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0193135625
x1=0.96875
alpha1=0.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0193135625
x1=0.90625
alpha1=0.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.038627125
x1=0.9375
alpha1=0.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.038627125
x1=0.9609375
alpha1=0.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.038627125
x1=0.9140625
alpha1=0.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0289703437
x1=1.0
alpha1=0.09375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0289703437
x1=0.875
alpha1=0.09375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0193135625
x1=1.25
alpha1=0.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0193135625
x1=1.125
alpha1=0.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0193135625
x1=1.1875
alpha1=0.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0193135625
x1=1.21875
alpha1=0.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0193135625
x1=1.15625
alpha1=0.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.038627125
x1=1.1875
alpha1=0.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.038627125
x1=1.2109375
alpha1=0.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.038627125
x1=1.1640625
alpha1=0.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0289703437
x1=1.25
alpha1=0.09375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=2.513274
y1=0.0289703437
x1=1.125
alpha1=0.09375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.309017
x1=-0.809017
alpha1=1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.3186737812
x1=-0.8342987812
alpha1=1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.3247092695
x1=-0.8500998945
alpha1=1.05078125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.2993602187
x1=-0.7837352187
alpha1=0.96875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.2933247304
x1=-0.7679341054
alpha1=0.94921875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.25
x1=-0.809017
alpha1=1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.28125
x1=-0.809017
alpha1=1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.375
x1=-0.809017
alpha1=1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.34375
x1=-0.809017
alpha1=1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.28125
x1=-0.8342987812
alpha1=1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.347644125
x1=-0.910144125
alpha1=1.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.3573009062
x1=-0.9354259062
alpha1=1.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.3633363945
x1=-0.9512270195
alpha1=1.17578125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.3379873437
x1=-0.8848623437
alpha1=1.09375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.3319518554
x1=-0.8690612304
alpha1=1.07421875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.3125
x1=-0.910144125
alpha1=1.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.28125
x1=-0.910144125
alpha1=1.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.375
x1=-0.910144125
alpha1=1.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.40625
x1=-0.910144125
alpha1=1.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.3125
x1=-0.9354259062
alpha1=1.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.3283305625
x1=-0.8595805625
alpha1=1.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.3379873437
x1=-0.8848623437
alpha1=1.09375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.344022832
x1=-0.900663457
alpha1=1.11328125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.3186737812
x1=-0.8342987812
alpha1=1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.3126382929
x1=-0.8184976679
alpha1=1.01171875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.28125
x1=-0.8595805625
alpha1=1.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.3046875
x1=-0.8595805625
alpha1=1.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.2578125
x1=-0.8595805625
alpha1=1.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.375
x1=-0.8595805625
alpha1=1.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.3515625
x1=-0.8595805625
alpha1=1.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.270389875
x1=-0.707889875
alpha1=0.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.2607330937
x1=-0.6826080937
alpha1=0.84375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.2546976054
x1=-0.6668069804
alpha1=0.82421875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.2800466562
x1=-0.7331716562
alpha1=0.90625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.2860821445
x1=-0.7489727695
alpha1=0.92578125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.21875
x1=-0.707889875
alpha1=0.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.2421875
x1=-0.707889875
alpha1=0.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.3125
x1=-0.707889875
alpha1=0.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.3359375
x1=-0.707889875
alpha1=0.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.21875
x1=-0.6826080937
alpha1=0.84375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.2897034375
x1=-0.7584534375
alpha1=0.9375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.2800466562
x1=-0.7331716562
alpha1=0.90625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.2740111679
x1=-0.7173705429
alpha1=0.88671875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.2993602187
x1=-0.7837352187
alpha1=0.96875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.305395707
x1=-0.799536332
alpha1=0.98828125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.25
x1=-0.7584534375
alpha1=0.9375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.2265625
x1=-0.7584534375
alpha1=0.9375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.3125
x1=-0.7584534375
alpha1=0.9375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.34375
x1=-0.7584534375
alpha1=0.9375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.25
x1=-0.7837352187
alpha1=0.96875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.125
x1=-0.809017
alpha1=1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.0625
x1=-0.809017
alpha1=1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.09375
x1=-0.809017
alpha1=1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1875
x1=-0.809017
alpha1=1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.15625
x1=-0.809017
alpha1=1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.125
x1=-0.8342987812
alpha1=1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.09375
x1=-0.8342987812
alpha1=1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.0703125
x1=-0.8342987812
alpha1=1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.15625
x1=-0.8342987812
alpha1=1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1796875
x1=-0.8342987812
alpha1=1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.21875
x1=-0.809017
alpha1=1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1875
x1=-0.809017
alpha1=1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.15625
x1=-0.809017
alpha1=1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.25
x1=-0.809017
alpha1=1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.28125
x1=-0.809017
alpha1=1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.21875
x1=-0.8342987812
alpha1=1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1875
x1=-0.8342987812
alpha1=1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1640625
x1=-0.8342987812
alpha1=1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.25
x1=-0.8342987812
alpha1=1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.2734375
x1=-0.8342987812
alpha1=1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.5
x1=-0.809017
alpha1=1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.5625
x1=-0.809017
alpha1=1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.53125
x1=-0.809017
alpha1=1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.4375
x1=-0.809017
alpha1=1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.46875
x1=-0.809017
alpha1=1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.5
x1=-0.7837352187
alpha1=0.96875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.53125
x1=-0.7837352187
alpha1=0.96875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.5546875
x1=-0.7837352187
alpha1=0.96875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.46875
x1=-0.7837352187
alpha1=0.96875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.4453125
x1=-0.7837352187
alpha1=0.96875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.40625
x1=-0.809017
alpha1=1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.4375
x1=-0.809017
alpha1=1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.46875
x1=-0.809017
alpha1=1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.375
x1=-0.809017
alpha1=1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.34375
x1=-0.809017
alpha1=1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.40625
x1=-0.7837352187
alpha1=0.96875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.4375
x1=-0.7837352187
alpha1=0.96875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.4609375
x1=-0.7837352187
alpha1=0.96875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.375
x1=-0.7837352187
alpha1=0.96875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.3515625
x1=-0.7837352187
alpha1=0.96875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.25
x1=-0.910144125
alpha1=1.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1875
x1=-0.910144125
alpha1=1.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.21875
x1=-0.910144125
alpha1=1.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.3125
x1=-0.910144125
alpha1=1.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.28125
x1=-0.910144125
alpha1=1.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.25
x1=-0.9354259062
alpha1=1.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.21875
x1=-0.9354259062
alpha1=1.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1953125
x1=-0.9354259062
alpha1=1.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.28125
x1=-0.9354259062
alpha1=1.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.3046875
x1=-0.9354259062
alpha1=1.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.309017
x1=0.809017
alpha1=-1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3186737812
x1=0.8342987812
alpha1=-1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3247092695
x1=0.8500998945
alpha1=-1.05078125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2993602187
x1=0.7837352187
alpha1=-0.96875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2933247304
x1=0.7679341054
alpha1=-0.94921875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.375
x1=0.809017
alpha1=-1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.34375
x1=0.809017
alpha1=-1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.25
x1=0.809017
alpha1=-1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.28125
x1=0.809017
alpha1=-1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.34375
x1=0.8342987812
alpha1=-1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.347644125
x1=0.910144125
alpha1=-1.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3573009062
x1=0.9354259062
alpha1=-1.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3633363945
x1=0.9512270195
alpha1=-1.17578125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3379873437
x1=0.8848623437
alpha1=-1.09375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3319518554
x1=0.8690612304
alpha1=-1.07421875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.375
x1=0.910144125
alpha1=-1.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.40625
x1=0.910144125
alpha1=-1.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3125
x1=0.910144125
alpha1=-1.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.28125
x1=0.910144125
alpha1=-1.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.390625
x1=0.9354259062
alpha1=-1.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3283305625
x1=0.8595805625
alpha1=-1.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3379873437
x1=0.8848623437
alpha1=-1.09375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.344022832
x1=0.900663457
alpha1=-1.11328125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3186737812
x1=0.8342987812
alpha1=-1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3126382929
x1=0.8184976679
alpha1=-1.01171875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.375
x1=0.8595805625
alpha1=-1.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3515625
x1=0.8595805625
alpha1=-1.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3984375
x1=0.8595805625
alpha1=-1.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.28125
x1=0.8595805625
alpha1=-1.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2578125
x1=0.8595805625
alpha1=-1.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.270389875
x1=0.707889875
alpha1=-0.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2800466562
x1=0.7331716562
alpha1=-0.90625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2860821445
x1=0.7489727695
alpha1=-0.92578125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2607330937
x1=0.6826080937
alpha1=-0.84375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2546976054
x1=0.6668069804
alpha1=-0.82421875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3125
x1=0.707889875
alpha1=-0.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3359375
x1=0.707889875
alpha1=-0.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.21875
x1=0.707889875
alpha1=-0.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2421875
x1=0.707889875
alpha1=-0.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3125
x1=0.7331716562
alpha1=-0.90625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2897034375
x1=0.7584534375
alpha1=-0.9375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2993602187
x1=0.7837352187
alpha1=-0.96875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.305395707
x1=0.799536332
alpha1=-0.98828125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2800466562
x1=0.7331716562
alpha1=-0.90625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2740111679
x1=0.7173705429
alpha1=-0.88671875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3125
x1=0.7584534375
alpha1=-0.9375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.34375
x1=0.7584534375
alpha1=-0.9375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.25
x1=0.7584534375
alpha1=-0.9375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2265625
x1=0.7584534375
alpha1=-0.9375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.34375
x1=0.7837352187
alpha1=-0.96875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5
x1=0.809017
alpha1=-1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5625
x1=0.809017
alpha1=-1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.53125
x1=0.809017
alpha1=-1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4375
x1=0.809017
alpha1=-1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.46875
x1=0.809017
alpha1=-1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5
x1=0.8342987812
alpha1=-1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.53125
x1=0.8342987812
alpha1=-1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5546875
x1=0.8342987812
alpha1=-1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.46875
x1=0.8342987812
alpha1=-1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4453125
x1=0.8342987812
alpha1=-1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.40625
x1=0.809017
alpha1=-1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4375
x1=0.809017
alpha1=-1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.46875
x1=0.809017
alpha1=-1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.375
x1=0.809017
alpha1=-1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.34375
x1=0.809017
alpha1=-1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.40625
x1=0.8342987812
alpha1=-1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4375
x1=0.8342987812
alpha1=-1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4609375
x1=0.8342987812
alpha1=-1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.375
x1=0.8342987812
alpha1=-1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3515625
x1=0.8342987812
alpha1=-1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.125
x1=0.809017
alpha1=-1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.0625
x1=0.809017
alpha1=-1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.09375
x1=0.809017
alpha1=-1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1875
x1=0.809017
alpha1=-1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.15625
x1=0.809017
alpha1=-1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.125
x1=0.8342987812
alpha1=-1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.09375
x1=0.8342987812
alpha1=-1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.0703125
x1=0.8342987812
alpha1=-1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.15625
x1=0.8342987812
alpha1=-1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1796875
x1=0.8342987812
alpha1=-1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.21875
x1=0.809017
alpha1=-1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1875
x1=0.809017
alpha1=-1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.15625
x1=0.809017
alpha1=-1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.25
x1=0.809017
alpha1=-1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.28125
x1=0.809017
alpha1=-1.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.21875
x1=0.8342987812
alpha1=-1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1875
x1=0.8342987812
alpha1=-1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1640625
x1=0.8342987812
alpha1=-1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.25
x1=0.8342987812
alpha1=-1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2734375
x1=0.8342987812
alpha1=-1.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4375
x1=0.910144125
alpha1=-1.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5
x1=0.910144125
alpha1=-1.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.46875
x1=0.910144125
alpha1=-1.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.375
x1=0.910144125
alpha1=-1.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.40625
x1=0.910144125
alpha1=-1.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4375
x1=0.9354259062
alpha1=-1.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.46875
x1=0.9354259062
alpha1=-1.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4921875
x1=0.9354259062
alpha1=-1.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.40625
x1=0.9354259062
alpha1=-1.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3828125
x1=0.9354259062
alpha1=-1.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.618034
x1=1.618034
alpha1=-2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.6276907812
x1=1.6433157812
alpha1=-2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.6337262695
x1=1.6591168945
alpha1=-2.05078125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.6083772187
x1=1.5927522187
alpha1=-1.96875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.6023417304
x1=1.5769511054
alpha1=-1.94921875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.6875
x1=1.618034
alpha1=-2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.65625
x1=1.618034
alpha1=-2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5625
x1=1.618034
alpha1=-2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.59375
x1=1.618034
alpha1=-2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.65625
x1=1.6433157812
alpha1=-2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.656661125
x1=1.719161125
alpha1=-2.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.6663179062
x1=1.7444429062
alpha1=-2.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.6723533945
x1=1.7602440195
alpha1=-2.17578125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.6470043437
x1=1.6938793437
alpha1=-2.09375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.6409688554
x1=1.6780782304
alpha1=-2.07421875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.6875
x1=1.719161125
alpha1=-2.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.71875
x1=1.719161125
alpha1=-2.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.625
x1=1.719161125
alpha1=-2.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.59375
x1=1.719161125
alpha1=-2.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.703125
x1=1.7444429062
alpha1=-2.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.6373475625
x1=1.6685975625
alpha1=-2.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.6470043437
x1=1.6938793437
alpha1=-2.09375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.653039832
x1=1.709680457
alpha1=-2.11328125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.6276907812
x1=1.6433157812
alpha1=-2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.6216552929
x1=1.6275146679
alpha1=-2.01171875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.6875
x1=1.6685975625
alpha1=-2.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.6640625
x1=1.6685975625
alpha1=-2.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.59375
x1=1.6685975625
alpha1=-2.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5703125
x1=1.6685975625
alpha1=-2.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.6875
x1=1.6938793437
alpha1=-2.09375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.579406875
x1=1.516906875
alpha1=-1.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5890636562
x1=1.5421886562
alpha1=-1.90625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5950991445
x1=1.5579897695
alpha1=-1.92578125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5697500937
x1=1.4916250937
alpha1=-1.84375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5637146054
x1=1.4758239804
alpha1=-1.82421875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.625
x1=1.516906875
alpha1=-1.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.6025390625
x1=1.516906875
alpha1=-1.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.6484375
x1=1.516906875
alpha1=-1.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.53125
x1=1.516906875
alpha1=-1.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5087890625
x1=1.516906875
alpha1=-1.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5987204375
x1=1.5674704375
alpha1=-1.9375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.6083772187
x1=1.5927522187
alpha1=-1.96875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.614412707
x1=1.608553332
alpha1=-1.98828125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5890636562
x1=1.5421886562
alpha1=-1.90625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5830281679
x1=1.5263875429
alpha1=-1.88671875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.625
x1=1.5674704375
alpha1=-1.9375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.65625
x1=1.5674704375
alpha1=-1.9375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5625
x1=1.5674704375
alpha1=-1.9375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.53125
x1=1.5674704375
alpha1=-1.9375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.65625
x1=1.5927522187
alpha1=-1.96875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.75
x1=1.618034
alpha1=-2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.8125
x1=1.618034
alpha1=-2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.78125
x1=1.618034
alpha1=-2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.6875
x1=1.618034
alpha1=-2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.71875
x1=1.618034
alpha1=-2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.75
x1=1.6433157812
alpha1=-2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.78125
x1=1.6433157812
alpha1=-2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.8046875
x1=1.6433157812
alpha1=-2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.71875
x1=1.6433157812
alpha1=-2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.6953125
x1=1.6433157812
alpha1=-2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.828125
x1=1.618034
alpha1=-2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.875
x1=1.618034
alpha1=-2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.8515625
x1=1.618034
alpha1=-2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.8984375
x1=1.618034
alpha1=-2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.78125
x1=1.618034
alpha1=-2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.7578125
x1=1.618034
alpha1=-2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.8046875
x1=1.618034
alpha1=-2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.828125
x1=1.6433157812
alpha1=-2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.875
x1=1.6433157812
alpha1=-2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.8515625
x1=1.6433157812
alpha1=-2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5
x1=1.618034
alpha1=-2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4375
x1=1.618034
alpha1=-2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.46875
x1=1.618034
alpha1=-2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5625
x1=1.618034
alpha1=-2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.53125
x1=1.618034
alpha1=-2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5
x1=1.6433157812
alpha1=-2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.46875
x1=1.6433157812
alpha1=-2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4453125
x1=1.6433157812
alpha1=-2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.53125
x1=1.6433157812
alpha1=-2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5546875
x1=1.6433157812
alpha1=-2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.40625
x1=1.618034
alpha1=-2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.375
x1=1.618034
alpha1=-2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.34375
x1=1.618034
alpha1=-2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4375
x1=1.618034
alpha1=-2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.46875
x1=1.618034
alpha1=-2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.40625
x1=1.6433157812
alpha1=-2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.375
x1=1.6433157812
alpha1=-2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3515625
x1=1.6433157812
alpha1=-2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4375
x1=1.6433157812
alpha1=-2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4609375
x1=1.6433157812
alpha1=-2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.75
x1=1.719161125
alpha1=-2.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.8125
x1=1.719161125
alpha1=-2.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.78125
x1=1.719161125
alpha1=-2.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.6875
x1=1.719161125
alpha1=-2.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.71875
x1=1.719161125
alpha1=-2.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.75
x1=1.7444429062
alpha1=-2.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.78125
x1=1.7444429062
alpha1=-2.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.8046875
x1=1.7444429062
alpha1=-2.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.71875
x1=1.7444429062
alpha1=-2.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.6953125
x1=1.7444429062
alpha1=-2.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1545085
x1=-0.4045085
alpha1=0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1448517187
x1=-0.3792267187
alpha1=0.46875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1388162304
x1=-0.3634256054
alpha1=0.44921875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1641652812
x1=-0.4297902812
alpha1=0.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1702007695
x1=-0.4455913945
alpha1=0.55078125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.125
x1=-0.4045085
alpha1=0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.09375
x1=-0.4045085
alpha1=0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1875
x1=-0.4045085
alpha1=0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.21875
x1=-0.4045085
alpha1=0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.109375
x1=-0.3792267187
alpha1=0.46875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.115881375
x1=-0.303381375
alpha1=0.375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1062245937
x1=-0.2780995937
alpha1=0.34375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1001891054
x1=-0.2622984804
alpha1=0.32421875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1255381562
x1=-0.3286631562
alpha1=0.40625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1315736445
x1=-0.3444642695
alpha1=0.42578125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.0625
x1=-0.303381375
alpha1=0.375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.0859375
x1=-0.303381375
alpha1=0.375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.15625
x1=-0.303381375
alpha1=0.375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1796875
x1=-0.303381375
alpha1=0.375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.0625
x1=-0.2780995937
alpha1=0.34375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1351949375
x1=-0.3539449375
alpha1=0.4375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1255381562
x1=-0.3286631562
alpha1=0.40625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1195026679
x1=-0.3128620429
alpha1=0.38671875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1448517187
x1=-0.3792267187
alpha1=0.46875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.150887207
x1=-0.395027832
alpha1=0.48828125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.09375
x1=-0.3539449375
alpha1=0.4375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.0703125
x1=-0.3539449375
alpha1=0.4375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1875
x1=-0.3539449375
alpha1=0.4375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1640625
x1=-0.3539449375
alpha1=0.4375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.09375
x1=-0.3286631562
alpha1=0.40625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.193135625
x1=-0.505635625
alpha1=0.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1834788437
x1=-0.4803538437
alpha1=0.59375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1774433554
x1=-0.4645527304
alpha1=0.57421875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.2027924062
x1=-0.5309174062
alpha1=0.65625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.2088278945
x1=-0.5467185195
alpha1=0.67578125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.125
x1=-0.505635625
alpha1=0.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.15625
x1=-0.505635625
alpha1=0.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.25
x1=-0.505635625
alpha1=0.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.21875
x1=-0.505635625
alpha1=0.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.15625
x1=-0.4803538437
alpha1=0.59375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1738220625
x1=-0.4550720625
alpha1=0.5625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1641652812
x1=-0.4297902812
alpha1=0.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1581297929
x1=-0.4139891679
alpha1=0.51171875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1834788437
x1=-0.4803538437
alpha1=0.59375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.189514332
x1=-0.496154957
alpha1=0.61328125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.125
x1=-0.4550720625
alpha1=0.5625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1484375
x1=-0.4550720625
alpha1=0.5625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.21875
x1=-0.4550720625
alpha1=0.5625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.2421875
x1=-0.4550720625
alpha1=0.5625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1962890625
x1=-0.4550720625
alpha1=0.5625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.0
x1=-0.4045085
alpha1=0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.0625
x1=-0.4045085
alpha1=0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.03125
x1=-0.4045085
alpha1=0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.0625
x1=-0.4045085
alpha1=0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.03125
x1=-0.4045085
alpha1=0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.0
x1=-0.3792267187
alpha1=0.46875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.03125
x1=-0.3792267187
alpha1=0.46875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.0546875
x1=-0.3792267187
alpha1=0.46875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.03125
x1=-0.3792267187
alpha1=0.46875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.0546875
x1=-0.3792267187
alpha1=0.46875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.078125
x1=-0.4045085
alpha1=0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.03125
x1=-0.4045085
alpha1=0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.0546875
x1=-0.4045085
alpha1=0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.0078125
x1=-0.4045085
alpha1=0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.125
x1=-0.4045085
alpha1=0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1015625
x1=-0.4045085
alpha1=0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1484375
x1=-0.4045085
alpha1=0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.078125
x1=-0.3792267187
alpha1=0.46875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.03125
x1=-0.3792267187
alpha1=0.46875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.0546875
x1=-0.3792267187
alpha1=0.46875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.25
x1=-0.4045085
alpha1=0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.3125
x1=-0.4045085
alpha1=0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.28125
x1=-0.4045085
alpha1=0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1875
x1=-0.4045085
alpha1=0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.21875
x1=-0.4045085
alpha1=0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.25
x1=-0.3792267187
alpha1=0.46875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.28125
x1=-0.3792267187
alpha1=0.46875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.3046875
x1=-0.3792267187
alpha1=0.46875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.21875
x1=-0.3792267187
alpha1=0.46875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.1953125
x1=-0.3792267187
alpha1=0.46875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.375
x1=-0.4045085
alpha1=0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.4375
x1=-0.4045085
alpha1=0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.40625
x1=-0.4045085
alpha1=0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.3125
x1=-0.4045085
alpha1=0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.34375
x1=-0.4045085
alpha1=0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.375
x1=-0.3792267187
alpha1=0.46875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.40625
x1=-0.3792267187
alpha1=0.46875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.4296875
x1=-0.3792267187
alpha1=0.46875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.34375
x1=-0.3792267187
alpha1=0.46875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.3203125
x1=-0.3792267187
alpha1=0.46875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.0
x1=-0.303381375
alpha1=0.375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.0625
x1=-0.303381375
alpha1=0.375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.03125
x1=-0.303381375
alpha1=0.375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.0625
x1=-0.303381375
alpha1=0.375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.03125
x1=-0.303381375
alpha1=0.375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.0
x1=-0.2780995937
alpha1=0.34375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.03125
x1=-0.2780995937
alpha1=0.34375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.0546875
x1=-0.2780995937
alpha1=0.34375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.03125
x1=-0.2780995937
alpha1=0.34375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.0546875
x1=-0.2780995937
alpha1=0.34375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.618034
x1=-1.618034
alpha1=2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.6276907812
x1=-1.6433157812
alpha1=2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.6337262695
x1=-1.6591168945
alpha1=2.05078125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.6083772187
x1=-1.5927522187
alpha1=1.96875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.6023417304
x1=-1.5769511054
alpha1=1.94921875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.5625
x1=-1.618034
alpha1=2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.59375
x1=-1.618034
alpha1=2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.6875
x1=-1.618034
alpha1=2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.65625
x1=-1.618034
alpha1=2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.5625
x1=-1.6433157812
alpha1=2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.656661125
x1=-1.719161125
alpha1=2.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.6663179062
x1=-1.7444429062
alpha1=2.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.6723533945
x1=-1.7602440195
alpha1=2.17578125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.6470043437
x1=-1.6938793437
alpha1=2.09375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.6409688554
x1=-1.6780782304
alpha1=2.07421875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.625
x1=-1.719161125
alpha1=2.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.59375
x1=-1.719161125
alpha1=2.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.6875
x1=-1.719161125
alpha1=2.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.71875
x1=-1.719161125
alpha1=2.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.625
x1=-1.7444429062
alpha1=2.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.6373475625
x1=-1.6685975625
alpha1=2.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.6470043437
x1=-1.6938793437
alpha1=2.09375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.653039832
x1=-1.709680457
alpha1=2.11328125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.6276907812
x1=-1.6433157812
alpha1=2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.6216552929
x1=-1.6275146679
alpha1=2.01171875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.59375
x1=-1.6685975625
alpha1=2.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.5703125
x1=-1.6685975625
alpha1=2.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.6875
x1=-1.6685975625
alpha1=2.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.6640625
x1=-1.6685975625
alpha1=2.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.59375
x1=-1.6938793437
alpha1=2.09375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.579406875
x1=-1.516906875
alpha1=1.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.5890636562
x1=-1.5421886562
alpha1=1.90625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.5950991445
x1=-1.5579897695
alpha1=1.92578125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.5697500937
x1=-1.4916250937
alpha1=1.84375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.5637146054
x1=-1.4758239804
alpha1=1.82421875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.53125
x1=-1.516906875
alpha1=1.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.5546875
x1=-1.516906875
alpha1=1.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.5087890625
x1=-1.516906875
alpha1=1.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.625
x1=-1.516906875
alpha1=1.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.6484375
x1=-1.516906875
alpha1=1.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.5987204375
x1=-1.5674704375
alpha1=1.9375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.6083772187
x1=-1.5927522187
alpha1=1.96875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.614412707
x1=-1.608553332
alpha1=1.98828125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.5890636562
x1=-1.5421886562
alpha1=1.90625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.5830281679
x1=-1.5263875429
alpha1=1.88671875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.5625
x1=-1.5674704375
alpha1=1.9375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.53125
x1=-1.5674704375
alpha1=1.9375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.625
x1=-1.5674704375
alpha1=1.9375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.65625
x1=-1.5674704375
alpha1=1.9375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.5625
x1=-1.5927522187
alpha1=1.96875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.5
x1=-1.618034
alpha1=2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.4375
x1=-1.618034
alpha1=2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.46875
x1=-1.618034
alpha1=2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.5625
x1=-1.618034
alpha1=2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.53125
x1=-1.618034
alpha1=2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.5
x1=-1.6433157812
alpha1=2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.46875
x1=-1.6433157812
alpha1=2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.4453125
x1=-1.6433157812
alpha1=2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.53125
x1=-1.6433157812
alpha1=2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.5546875
x1=-1.6433157812
alpha1=2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.40625
x1=-1.618034
alpha1=2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.375
x1=-1.618034
alpha1=2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.34375
x1=-1.618034
alpha1=2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.4375
x1=-1.618034
alpha1=2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.46875
x1=-1.618034
alpha1=2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.40625
x1=-1.6433157812
alpha1=2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.375
x1=-1.6433157812
alpha1=2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.3515625
x1=-1.6433157812
alpha1=2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.4375
x1=-1.6433157812
alpha1=2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.4609375
x1=-1.6433157812
alpha1=2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.75
x1=-1.618034
alpha1=2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.8125
x1=-1.618034
alpha1=2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.78125
x1=-1.618034
alpha1=2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.6875
x1=-1.618034
alpha1=2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.71875
x1=-1.618034
alpha1=2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.75
x1=-1.6433157812
alpha1=2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.78125
x1=-1.6433157812
alpha1=2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.8046875
x1=-1.6433157812
alpha1=2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.71875
x1=-1.6433157812
alpha1=2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.6953125
x1=-1.6433157812
alpha1=2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.828125
x1=-1.618034
alpha1=2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.875
x1=-1.618034
alpha1=2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.8984375
x1=-1.618034
alpha1=2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.8515625
x1=-1.618034
alpha1=2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.78125
x1=-1.618034
alpha1=2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.8046875
x1=-1.618034
alpha1=2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.7578125
x1=-1.618034
alpha1=2.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.828125
x1=-1.6433157812
alpha1=2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.875
x1=-1.6433157812
alpha1=2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.8515625
x1=-1.6433157812
alpha1=2.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.5
x1=-1.719161125
alpha1=2.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.4375
x1=-1.719161125
alpha1=2.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.46875
x1=-1.719161125
alpha1=2.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.5625
x1=-1.719161125
alpha1=2.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.53125
x1=-1.719161125
alpha1=2.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.5
x1=-1.7444429062
alpha1=2.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.46875
x1=-1.7444429062
alpha1=2.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.4453125
x1=-1.7444429062
alpha1=2.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.53125
x1=-1.7444429062
alpha1=2.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.5546875
x1=-1.7444429062
alpha1=2.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1545085
x1=0.4045085
alpha1=-0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1641652812
x1=0.4297902812
alpha1=-0.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1702007695
x1=0.4455913945
alpha1=-0.55078125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1448517187
x1=0.3792267187
alpha1=-0.46875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1388162304
x1=0.3634256054
alpha1=-0.44921875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1875
x1=0.4045085
alpha1=-0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.21875
x1=0.4045085
alpha1=-0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.125
x1=0.4045085
alpha1=-0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.09375
x1=0.4045085
alpha1=-0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1875
x1=0.4297902812
alpha1=-0.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.193135625
x1=0.505635625
alpha1=-0.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2027924062
x1=0.5309174062
alpha1=-0.65625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2088278945
x1=0.5467185195
alpha1=-0.67578125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1834788437
x1=0.4803538437
alpha1=-0.59375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1774433554
x1=0.4645527304
alpha1=-0.57421875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.25
x1=0.505635625
alpha1=-0.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.21875
x1=0.505635625
alpha1=-0.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.125
x1=0.505635625
alpha1=-0.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.15625
x1=0.505635625
alpha1=-0.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.25
x1=0.5309174062
alpha1=-0.65625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1738220625
x1=0.4550720625
alpha1=-0.5625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1834788437
x1=0.4803538437
alpha1=-0.59375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.189514332
x1=0.496154957
alpha1=-0.61328125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1641652812
x1=0.4297902812
alpha1=-0.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1581297929
x1=0.4139891679
alpha1=-0.51171875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.21875
x1=0.4550720625
alpha1=-0.5625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1962890625
x1=0.4550720625
alpha1=-0.5625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2421875
x1=0.4550720625
alpha1=-0.5625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.125
x1=0.4550720625
alpha1=-0.5625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1484375
x1=0.4550720625
alpha1=-0.5625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.115881375
x1=0.303381375
alpha1=-0.375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1255381562
x1=0.3286631562
alpha1=-0.40625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1315736445
x1=0.3444642695
alpha1=-0.42578125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1062245937
x1=0.2780995937
alpha1=-0.34375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1001891054
x1=0.2622984804
alpha1=-0.32421875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.15625
x1=0.303381375
alpha1=-0.375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1796875
x1=0.303381375
alpha1=-0.375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.0625
x1=0.303381375
alpha1=-0.375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.0859375
x1=0.303381375
alpha1=-0.375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.15625
x1=0.3286631562
alpha1=-0.40625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1351949375
x1=0.3539449375
alpha1=-0.4375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1448517187
x1=0.3792267187
alpha1=-0.46875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.150887207
x1=0.395027832
alpha1=-0.48828125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1255381562
x1=0.3286631562
alpha1=-0.40625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1195026679
x1=0.3128620429
alpha1=-0.38671875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1875
x1=0.3539449375
alpha1=-0.4375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1640625
x1=0.3539449375
alpha1=-0.4375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.09375
x1=0.3539449375
alpha1=-0.4375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.0703125
x1=0.3539449375
alpha1=-0.4375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1875
x1=0.3792267187
alpha1=-0.46875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.25
x1=0.4045085
alpha1=-0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3125
x1=0.4045085
alpha1=-0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.28125
x1=0.4045085
alpha1=-0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1875
x1=0.4045085
alpha1=-0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.21875
x1=0.4045085
alpha1=-0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.25
x1=0.4297902812
alpha1=-0.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.28125
x1=0.4297902812
alpha1=-0.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3046875
x1=0.4297902812
alpha1=-0.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.21875
x1=0.4297902812
alpha1=-0.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1953125
x1=0.4297902812
alpha1=-0.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.375
x1=0.4045085
alpha1=-0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4375
x1=0.4045085
alpha1=-0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.40625
x1=0.4045085
alpha1=-0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3125
x1=0.4045085
alpha1=-0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.34375
x1=0.4045085
alpha1=-0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.375
x1=0.4297902812
alpha1=-0.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.40625
x1=0.4297902812
alpha1=-0.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4296875
x1=0.4297902812
alpha1=-0.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.34375
x1=0.4297902812
alpha1=-0.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3203125
x1=0.4297902812
alpha1=-0.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.0
x1=0.4045085
alpha1=-0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.0625
x1=0.4045085
alpha1=-0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.03125
x1=0.4045085
alpha1=-0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.0625
x1=0.4045085
alpha1=-0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.03125
x1=0.4045085
alpha1=-0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.0
x1=0.4297902812
alpha1=-0.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.03125
x1=0.4297902812
alpha1=-0.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.0546875
x1=0.4297902812
alpha1=-0.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.03125
x1=0.4297902812
alpha1=-0.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.0546875
x1=0.4297902812
alpha1=-0.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.078125
x1=0.4045085
alpha1=-0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.03125
x1=0.4045085
alpha1=-0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.0078125
x1=0.4045085
alpha1=-0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.0546875
x1=0.4045085
alpha1=-0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.125
x1=0.4045085
alpha1=-0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1015625
x1=0.4045085
alpha1=-0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1484375
x1=0.4045085
alpha1=-0.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.078125
x1=0.4297902812
alpha1=-0.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.03125
x1=0.4297902812
alpha1=-0.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.0546875
x1=0.4297902812
alpha1=-0.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.28125
x1=0.505635625
alpha1=-0.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3125
x1=0.505635625
alpha1=-0.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.34375
x1=0.505635625
alpha1=-0.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.25
x1=0.505635625
alpha1=-0.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.21875
x1=0.505635625
alpha1=-0.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.28125
x1=0.5309174062
alpha1=-0.65625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3125
x1=0.5309174062
alpha1=-0.65625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3359375
x1=0.5309174062
alpha1=-0.65625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.25
x1=0.5309174062
alpha1=-0.65625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2265625
x1=0.5309174062
alpha1=-0.65625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.927051
x1=-2.427051
alpha1=3.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.9367077812
x1=-2.4523327812
alpha1=3.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.9427432695
x1=-2.4681338945
alpha1=3.05078125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.9173942187
x1=-2.4017692187
alpha1=2.96875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.9113587304
x1=-2.3859681054
alpha1=2.94921875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.875
x1=-2.427051
alpha1=3.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.8984375
x1=-2.427051
alpha1=3.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.96875
x1=-2.427051
alpha1=3.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.9921875
x1=-2.427051
alpha1=3.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.875
x1=-2.4523327812
alpha1=3.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.965678125
x1=-2.528178125
alpha1=3.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.9753349062
x1=-2.5534599062
alpha1=3.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.9813703945
x1=-2.5692610195
alpha1=3.17578125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.9560213437
x1=-2.5028963437
alpha1=3.09375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.9499858554
x1=-2.4870952304
alpha1=3.07421875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.9375
x1=-2.528178125
alpha1=3.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.90625
x1=-2.528178125
alpha1=3.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=1.0
x1=-2.528178125
alpha1=3.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=1.03125
x1=-2.528178125
alpha1=3.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.9375
x1=-2.5534599062
alpha1=3.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.9463645625
x1=-2.4776145625
alpha1=3.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.9560213437
x1=-2.5028963437
alpha1=3.09375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.962056832
x1=-2.518697457
alpha1=3.11328125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.9367077812
x1=-2.4523327812
alpha1=3.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.9306722929
x1=-2.4365316679
alpha1=3.01171875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.90625
x1=-2.4776145625
alpha1=3.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.8828125
x1=-2.4776145625
alpha1=3.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=1.0
x1=-2.4776145625
alpha1=3.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.96875
x1=-2.4776145625
alpha1=3.0625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.90625
x1=-2.5028963437
alpha1=3.09375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.888423875
x1=-2.325923875
alpha1=2.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.8980806562
x1=-2.3512056562
alpha1=2.90625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.9041161445
x1=-2.3670067695
alpha1=2.92578125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.8787670937
x1=-2.3006420937
alpha1=2.84375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.8727316054
x1=-2.2848409804
alpha1=2.82421875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.84375
x1=-2.325923875
alpha1=2.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.8203125
x1=-2.325923875
alpha1=2.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.9375
x1=-2.325923875
alpha1=2.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.9140625
x1=-2.325923875
alpha1=2.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.875
x1=-2.3512056562
alpha1=2.90625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.9077374375
x1=-2.3764874375
alpha1=2.9375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.9173942187
x1=-2.4017692187
alpha1=2.96875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.923429707
x1=-2.417570332
alpha1=2.98828125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.8980806562
x1=-2.3512056562
alpha1=2.90625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.8920451679
x1=-2.3354045429
alpha1=2.88671875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.875
x1=-2.3764874375
alpha1=2.9375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.84375
x1=-2.3764874375
alpha1=2.9375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.9375
x1=-2.3764874375
alpha1=2.9375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.96875
x1=-2.3764874375
alpha1=2.9375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.875
x1=-2.4017692187
alpha1=2.96875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.75
x1=-2.427051
alpha1=3.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.6875
x1=-2.427051
alpha1=3.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.71875
x1=-2.427051
alpha1=3.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.8125
x1=-2.427051
alpha1=3.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.78125
x1=-2.427051
alpha1=3.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.75
x1=-2.4523327812
alpha1=3.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.71875
x1=-2.4523327812
alpha1=3.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.6953125
x1=-2.4523327812
alpha1=3.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.78125
x1=-2.4523327812
alpha1=3.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.8046875
x1=-2.4523327812
alpha1=3.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.84375
x1=-2.427051
alpha1=3.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.8125
x1=-2.427051
alpha1=3.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.78125
x1=-2.427051
alpha1=3.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.875
x1=-2.427051
alpha1=3.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.90625
x1=-2.427051
alpha1=3.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.84375
x1=-2.4523327812
alpha1=3.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.8125
x1=-2.4523327812
alpha1=3.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.7890625
x1=-2.4523327812
alpha1=3.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.875
x1=-2.4523327812
alpha1=3.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.8984375
x1=-2.4523327812
alpha1=3.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=1.0
x1=-2.427051
alpha1=3.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=1.0625
x1=-2.427051
alpha1=3.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=1.03125
x1=-2.427051
alpha1=3.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.9375
x1=-2.427051
alpha1=3.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.96875
x1=-2.427051
alpha1=3.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=1.0
x1=-2.4523327812
alpha1=3.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=1.03125
x1=-2.4523327812
alpha1=3.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=1.0546875
x1=-2.4523327812
alpha1=3.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.96875
x1=-2.4523327812
alpha1=3.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.9453125
x1=-2.4523327812
alpha1=3.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=1.125
x1=-2.427051
alpha1=3.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=1.1875
x1=-2.427051
alpha1=3.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=1.15625
x1=-2.427051
alpha1=3.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=1.0625
x1=-2.427051
alpha1=3.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=1.09375
x1=-2.427051
alpha1=3.0
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=1.125
x1=-2.4523327812
alpha1=3.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=1.15625
x1=-2.4523327812
alpha1=3.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=1.1796875
x1=-2.4523327812
alpha1=3.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=1.09375
x1=-2.4523327812
alpha1=3.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=1.0703125
x1=-2.4523327812
alpha1=3.03125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.875
x1=-2.528178125
alpha1=3.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.8125
x1=-2.528178125
alpha1=3.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.84375
x1=-2.528178125
alpha1=3.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.9375
x1=-2.528178125
alpha1=3.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.90625
x1=-2.528178125
alpha1=3.125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.875
x1=-2.5534599062
alpha1=3.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.84375
x1=-2.5534599062
alpha1=3.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.8203125
x1=-2.5534599062
alpha1=3.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.90625
x1=-2.5534599062
alpha1=3.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.9296875
x1=-2.5534599062
alpha1=3.15625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.23176275
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2414195312
x1=0.6320445312
alpha1=-0.78125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2474550195
x1=0.6478456445
alpha1=-0.80078125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2221059687
x1=0.5814809687
alpha1=-0.71875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2160704804
x1=0.5656798554
alpha1=-0.69921875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.28125
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2578125
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1875
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1640625
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.28125
x1=0.6320445312
alpha1=-0.78125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.270389875
x1=0.707889875
alpha1=-0.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2800466562
x1=0.7331716562
alpha1=-0.90625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2860821445
x1=0.7489727695
alpha1=-0.92578125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2607330937
x1=0.6826080937
alpha1=-0.84375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2546976054
x1=0.6668069804
alpha1=-0.82421875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3125
x1=0.707889875
alpha1=-0.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3359375
x1=0.707889875
alpha1=-0.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.21875
x1=0.707889875
alpha1=-0.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2421875
x1=0.707889875
alpha1=-0.875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3125
x1=0.7331716562
alpha1=-0.90625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2510763125
x1=0.6573263125
alpha1=-0.8125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2607330937
x1=0.6826080937
alpha1=-0.84375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.266768582
x1=0.698409207
alpha1=-0.86328125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2414195312
x1=0.6320445312
alpha1=-0.78125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2353840429
x1=0.6162434179
alpha1=-0.76171875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3125
x1=0.6573263125
alpha1=-0.8125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.28125
x1=0.6573263125
alpha1=-0.8125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1875
x1=0.6573263125
alpha1=-0.8125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.21875
x1=0.6573263125
alpha1=-0.8125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.296875
x1=0.6826080937
alpha1=-0.84375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.193135625
x1=0.505635625
alpha1=-0.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2027924062
x1=0.5309174062
alpha1=-0.65625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2088278945
x1=0.5467185195
alpha1=-0.67578125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1834788437
x1=0.4803538437
alpha1=-0.59375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1774433554
x1=0.4645527304
alpha1=-0.57421875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.25
x1=0.505635625
alpha1=-0.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.21875
x1=0.505635625
alpha1=-0.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.125
x1=0.505635625
alpha1=-0.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.15625
x1=0.505635625
alpha1=-0.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.25
x1=0.5309174062
alpha1=-0.65625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2124491875
x1=0.5561991875
alpha1=-0.6875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2221059687
x1=0.5814809687
alpha1=-0.71875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.228141457
x1=0.597282082
alpha1=-0.73828125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2027924062
x1=0.5309174062
alpha1=-0.65625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1967569179
x1=0.5151162929
alpha1=-0.63671875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.25
x1=0.5561991875
alpha1=-0.6875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.28125
x1=0.5561991875
alpha1=-0.6875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1875
x1=0.5561991875
alpha1=-0.6875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.15625
x1=0.5561991875
alpha1=-0.6875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.25
x1=0.5814809687
alpha1=-0.71875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.375
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4375
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.40625
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3125
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.34375
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.375
x1=0.6320445312
alpha1=-0.78125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.40625
x1=0.6320445312
alpha1=-0.78125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4296875
x1=0.6320445312
alpha1=-0.78125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.34375
x1=0.6320445312
alpha1=-0.78125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3203125
x1=0.6320445312
alpha1=-0.78125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2365911406
x1=0.6194036406
alpha1=-0.765625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.302734375
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.34375
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3671875
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.25
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2734375
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.302734375
x1=0.6320445312
alpha1=-0.78125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.34375
x1=0.6320445312
alpha1=-0.78125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.25
x1=0.6320445312
alpha1=-0.78125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.2734375
x1=0.6320445312
alpha1=-0.78125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.453125
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4765625
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5234375
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.40625
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3828125
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4296875
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.453125
x1=0.6320445312
alpha1=-0.78125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5
x1=0.6320445312
alpha1=-0.78125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4765625
x1=0.6320445312
alpha1=-0.78125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.125
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.0625
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.09375
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1875
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.15625
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.125
x1=0.6320445312
alpha1=-0.78125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.09375
x1=0.6320445312
alpha1=-0.78125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.0703125
x1=0.6320445312
alpha1=-0.78125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.15625
x1=0.6320445312
alpha1=-0.78125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1796875
x1=0.6320445312
alpha1=-0.78125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.03125
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.0
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.03125
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.0625
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.09375
x1=0.60676275
alpha1=-0.75
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.03125
x1=0.6320445312
alpha1=-0.78125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.0
x1=0.6320445312
alpha1=-0.78125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=0.0234375
x1=0.6320445312
alpha1=-0.78125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.0625
x1=0.6320445312
alpha1=-0.78125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.0859375
x1=0.6320445312
alpha1=-0.78125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4635255
x1=1.2135255
alpha1=-1.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4731822812
x1=1.2388072812
alpha1=-1.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4792177695
x1=1.2546083945
alpha1=-1.55078125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4538687187
x1=1.1882437187
alpha1=-1.46875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4478332304
x1=1.1724426054
alpha1=-1.44921875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5
x1=1.2135255
alpha1=-1.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.53125
x1=1.2135255
alpha1=-1.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4375
x1=1.2135255
alpha1=-1.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.40625
x1=1.2135255
alpha1=-1.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5
x1=1.2388072812
alpha1=-1.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.502152625
x1=1.314652625
alpha1=-1.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5118094062
x1=1.3399344062
alpha1=-1.65625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5178448945
x1=1.3557355195
alpha1=-1.67578125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4924958437
x1=1.2893708437
alpha1=-1.59375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4864603554
x1=1.2735697304
alpha1=-1.57421875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5625
x1=1.314652625
alpha1=-1.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.53125
x1=1.314652625
alpha1=-1.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4375
x1=1.314652625
alpha1=-1.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.46875
x1=1.314652625
alpha1=-1.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.546875
x1=1.3399344062
alpha1=-1.65625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4828390625
x1=1.2640890625
alpha1=-1.5625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4924958437
x1=1.2893708437
alpha1=-1.59375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.498531332
x1=1.305171957
alpha1=-1.61328125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4731822812
x1=1.2388072812
alpha1=-1.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4671467929
x1=1.2230061679
alpha1=-1.51171875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.53125
x1=1.2640890625
alpha1=-1.5625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5078125
x1=1.2640890625
alpha1=-1.5625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4375
x1=1.2640890625
alpha1=-1.5625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4140625
x1=1.2640890625
alpha1=-1.5625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4599609375
x1=1.2640890625
alpha1=-1.5625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.424898375
x1=1.112398375
alpha1=-1.375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4345551562
x1=1.1376801562
alpha1=-1.40625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4405906445
x1=1.1534812695
alpha1=-1.42578125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4152415937
x1=1.0871165937
alpha1=-1.34375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4092061054
x1=1.0713154804
alpha1=-1.32421875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.46875
x1=1.112398375
alpha1=-1.375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4921875
x1=1.112398375
alpha1=-1.375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.375
x1=1.112398375
alpha1=-1.375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3984375
x1=1.112398375
alpha1=-1.375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.46875
x1=1.1376801562
alpha1=-1.40625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4442119375
x1=1.1629619375
alpha1=-1.4375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4538687187
x1=1.1882437187
alpha1=-1.46875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.459904207
x1=1.204044832
alpha1=-1.48828125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4345551562
x1=1.1376801562
alpha1=-1.40625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4285196679
x1=1.1218790429
alpha1=-1.38671875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5
x1=1.1629619375
alpha1=-1.4375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.46875
x1=1.1629619375
alpha1=-1.4375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.375
x1=1.1629619375
alpha1=-1.4375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.40625
x1=1.1629619375
alpha1=-1.4375
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5
x1=1.1882437187
alpha1=-1.46875
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.625
x1=1.2135255
alpha1=-1.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.6875
x1=1.2135255
alpha1=-1.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.65625
x1=1.2135255
alpha1=-1.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5625
x1=1.2135255
alpha1=-1.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.59375
x1=1.2135255
alpha1=-1.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.625
x1=1.2388072812
alpha1=-1.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.65625
x1=1.2388072812
alpha1=-1.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.6796875
x1=1.2388072812
alpha1=-1.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.59375
x1=1.2388072812
alpha1=-1.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5703125
x1=1.2388072812
alpha1=-1.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.546875
x1=1.2135255
alpha1=-1.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.59375
x1=1.2135255
alpha1=-1.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5703125
x1=1.2135255
alpha1=-1.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.6171875
x1=1.2135255
alpha1=-1.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5
x1=1.2135255
alpha1=-1.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4765625
x1=1.2135255
alpha1=-1.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5234375
x1=1.2135255
alpha1=-1.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.546875
x1=1.2388072812
alpha1=-1.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.59375
x1=1.2388072812
alpha1=-1.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5703125
x1=1.2388072812
alpha1=-1.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.25
x1=1.2135255
alpha1=-1.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1875
x1=1.2135255
alpha1=-1.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.21875
x1=1.2135255
alpha1=-1.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3125
x1=1.2135255
alpha1=-1.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.28125
x1=1.2135255
alpha1=-1.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.25
x1=1.2388072812
alpha1=-1.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.21875
x1=1.2388072812
alpha1=-1.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.1953125
x1=1.2388072812
alpha1=-1.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.28125
x1=1.2388072812
alpha1=-1.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3046875
x1=1.2388072812
alpha1=-1.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.375
x1=1.2135255
alpha1=-1.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3125
x1=1.2135255
alpha1=-1.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.34375
x1=1.2135255
alpha1=-1.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4375
x1=1.2135255
alpha1=-1.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.40625
x1=1.2135255
alpha1=-1.5
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.375
x1=1.2388072812
alpha1=-1.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.34375
x1=1.2388072812
alpha1=-1.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.3203125
x1=1.2388072812
alpha1=-1.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.40625
x1=1.2388072812
alpha1=-1.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.4296875
x1=1.2388072812
alpha1=-1.53125
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.59375
x1=1.314652625
alpha1=-1.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.625
x1=1.314652625
alpha1=-1.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.65625
x1=1.314652625
alpha1=-1.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5625
x1=1.314652625
alpha1=-1.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.53125
x1=1.314652625
alpha1=-1.625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.59375
x1=1.3399344062
alpha1=-1.65625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.625
x1=1.3399344062
alpha1=-1.65625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.6484375
x1=1.3399344062
alpha1=-1.65625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5625
x1=1.3399344062
alpha1=-1.65625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
theta1=4.712389
y1=-0.5390625
x1=1.3399344062
alpha1=-1.65625
labels.append(-1)
xsampling=[x1**2,y1**2]
data_points.append(xsampling)
# --- Iterative SVM formula extraction ---
# Fit a linear SVM on the squared-coordinate features, write the resulting
# separating-surface formula to the `formulas` file, drop the samples the
# model already classifies correctly (keeping every misclassified sample
# and every positive sample), and refit until the error rate falls
# below 0.1.
variables = ["x1**2", "y1**2"]
error_percentage = 100
error_old = 500
net_samples = np.array(labels).shape[0]
num_sample_store = [net_samples]
error_store = []
# NOTE(review): hard-coded absolute output path -- confirm/parameterize
# before running on another machine.
target = open('/home/sumanth/Documents/Toyota_delete_after_done/Osiris2/src/abstractor/classifier_python_files/formulas', 'w')
try:
    while error_percentage >= 0.1:
        (error, w, ypred) = svm_classifier(np.array(data_points), np.array(labels), 2)
        formula_list = []
        formula_writer = ""
        # NOTE(review): the denominator stays the ORIGINAL sample count even
        # after samples are dropped below -- presumably intentional (error is
        # measured against the full set); confirm.
        error_percentage = np.sum(np.abs(error)) / net_samples
        print(error_percentage)
        new_data_points = []
        new_labels = []  # fix: this initializer was duplicated in the original
        new_y_pred = []
        for i in range(error.shape[0]):
            # Keep misclassified samples (error == +/-1) and all positives.
            if (error[i] == 1) or (error[i] == -1) or labels[i] == 1:
                # mode has to be 1 if y(i)==-1
                new_labels.append(labels[i])
                new_data_points.append(data_points[i])
                new_y_pred.append(ypred[i, 0])
        num_samples_temp = len(new_data_points)
        num_sample_store.append(num_samples_temp)
        print(num_sample_store[-1])
        # Only replace the working set when it actually shrank.
        if num_sample_store[-1] < num_sample_store[-2]:
            print(num_sample_store[-1])
            data_points = new_data_points
            ypred = np.array(new_y_pred)
            labels = new_labels
        error_old = error_percentage
        error_store.append(error_percentage)
        # Build "w0+w1*(x1**2)+w2*(y1**2)" term by term; w[0] is the bias.
        for w_writer in range(w.shape[0]):
            if w_writer == 0:
                formula_writer = '{:.20f}'.format(w[0, 0])
            else:
                formula_writer = '{:.20f}'.format(w[w_writer, 0]) + "*(" + variables[w_writer - 1] + ")"
            formula_list.append(formula_writer)
        concatenated_formula = ""
        for form in formula_list:
            concatenated_formula = concatenated_formula + "+" + form
        concatenated_formula = "(" + concatenated_formula[1:] + ")>0"
        target.write(concatenated_formula)
        # NOTE(review): threshold 0.01 here vs 0.1 in the loop condition can
        # leave a trailing comma after the final formula -- confirm intended.
        if error_percentage >= 0.01:
            target.write(",")
finally:
    # Fix: the output file was never closed in the original (resource leak).
    target.close()
| 18.844955
| 123
| 0.761881
| 48,281
| 264,847
| 4.136265
| 0.02299
| 0.031562
| 0.050099
| 0.060119
| 0.965143
| 0.962765
| 0.962765
| 0.962765
| 0.962084
| 0.962084
| 0
| 0.315933
| 0.06187
| 264,847
| 14,053
| 124
| 18.846296
| 0.487827
| 0.000106
| 0
| 0.943211
| 0
| 0
| 0.000521
| 0.000393
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.000356
| 0
| 0.000356
| 0.000213
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
af6611cdad67cbcfbcf60d1394e4a979ff9ae943
| 360
|
py
|
Python
|
Modulos Proprios/vendas/calc_preco.py
|
pinheirogus/Curso-Python-Udemy
|
d6d52320426172e924081b9df619490baa8c6016
|
[
"MIT"
] | 1
|
2021-09-01T01:58:13.000Z
|
2021-09-01T01:58:13.000Z
|
Modulos Proprios/vendas/calc_preco.py
|
pinheirogus/Curso-Python-Udemy
|
d6d52320426172e924081b9df619490baa8c6016
|
[
"MIT"
] | null | null | null |
Modulos Proprios/vendas/calc_preco.py
|
pinheirogus/Curso-Python-Udemy
|
d6d52320426172e924081b9df619490baa8c6016
|
[
"MIT"
] | null | null | null |
from vendas.formata import preco
def aumento(valor, porcentagem, formatacao = False):
    """Return *valor* increased by *porcentagem* percent.

    When *formatacao* is truthy, the result is passed through
    ``preco.real`` to produce a formatted price string; otherwise the
    raw numeric result is returned.
    """
    resultado = valor + (valor * (porcentagem / 100))
    return preco.real(resultado) if formatacao else resultado
def reducao(valor, porcentagem, formatacao = False):
    """Return *valor* decreased by *porcentagem* percent.

    When *formatacao* is truthy, the result is passed through
    ``preco.real`` to produce a formatted price string; otherwise the
    raw numeric result is returned.
    """
    resultado = valor - (valor * (porcentagem / 100))
    return preco.real(resultado) if formatacao else resultado
| 18.947368
| 52
| 0.638889
| 43
| 360
| 5.348837
| 0.395349
| 0.278261
| 0.226087
| 0.269565
| 0.791304
| 0.791304
| 0.791304
| 0.791304
| 0.791304
| 0.791304
| 0
| 0.022472
| 0.258333
| 360
| 18
| 53
| 20
| 0.838951
| 0
| 0
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.181818
| false
| 0
| 0.090909
| 0
| 0.636364
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
af8afc69502425bc86448d4ea45f6b9d49c8ca5f
| 124
|
py
|
Python
|
gsplines/services/__init__.py
|
rafaelrojasmiliani/gsplines
|
663b10f6d53b498a1e892d9eb32a345153de36d2
|
[
"MIT"
] | 3
|
2021-08-28T01:42:40.000Z
|
2021-12-02T22:39:45.000Z
|
gsplines/services/__init__.py
|
rafaelrojasmiliani/gsplines
|
663b10f6d53b498a1e892d9eb32a345153de36d2
|
[
"MIT"
] | null | null | null |
gsplines/services/__init__.py
|
rafaelrojasmiliani/gsplines
|
663b10f6d53b498a1e892d9eb32a345153de36d2
|
[
"MIT"
] | null | null | null |
from .gsplinesjson import piecewise2json
from .gsplinesjson import json2piecewise
from .xmlrpc import cGplineXMLRPCServer
| 24.8
| 41
| 0.862903
| 12
| 124
| 8.916667
| 0.583333
| 0.299065
| 0.411215
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018182
| 0.112903
| 124
| 4
| 42
| 31
| 0.954545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
af9983c04edd8520181bdff6f2f5533dc6f72f9f
| 637
|
py
|
Python
|
test_for_lesson.py
|
Lutiklut/lesson6
|
8fc22af345fa21881c5b9fe53b457a82f2825155
|
[
"Apache-2.0"
] | null | null | null |
test_for_lesson.py
|
Lutiklut/lesson6
|
8fc22af345fa21881c5b9fe53b457a82f2825155
|
[
"Apache-2.0"
] | null | null | null |
test_for_lesson.py
|
Lutiklut/lesson6
|
8fc22af345fa21881c5b9fe53b457a82f2825155
|
[
"Apache-2.0"
] | null | null | null |
import divisor_master
# Test2
# Check that the returned number is not a multiple of 2.
def test2():
    """big_simple_denominator(38) should yield an odd value (38 = 2 * 19)."""
    assert int(divisor_master.big_simple_denominator(38))%2!=0
# Test3
# Check the correctness of the produced result.
def test3():
    """The largest prime divisor of 38 is expected to be 19."""
    assert int(divisor_master.big_simple_denominator(38))==19
# Test 4
# Check whether the function returns the largest divisor (not necessarily
# prime) of the number.
def test4():
    """The largest divisor of 50 (including 50 itself) should be 50."""
    assert int(divisor_master.big_denominators(50)) == 50
# Test 5
# Check whether the function returns the largest divisor (not necessarily
# prime) of the number.
# Fix: this function was originally also named test4, shadowing the test
# above so pytest only ever collected one of them; renamed to test5 to
# match the "Test 5" comment.
# NOTE(review): the assertion is identical to test4 -- presumably a
# different input was intended here; confirm with the author.
def test5():
    assert int(divisor_master.big_denominators(50)) == 50
| 25.48
| 88
| 0.736264
| 85
| 637
| 5.388235
| 0.447059
| 0.141921
| 0.139738
| 0.19214
| 0.707424
| 0.707424
| 0.707424
| 0.707424
| 0.515284
| 0.515284
| 0
| 0.047619
| 0.175824
| 637
| 25
| 89
| 25.48
| 0.824762
| 0.436421
| 0
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.444444
| 1
| 0.444444
| true
| 0
| 0.111111
| 0
| 0.555556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
bbb7621b83f0da928819d0491c4cbfc1cb79387d
| 102
|
py
|
Python
|
testing/lowercasetf.py
|
worldwalker2000/pyxx
|
8c6f129042241ca8b0eb274a69ca56b2ac1261cb
|
[
"MIT"
] | 4
|
2021-12-29T22:44:57.000Z
|
2022-01-21T17:27:35.000Z
|
testing/lowercasetf.py
|
worldwalker2000/pyxx
|
8c6f129042241ca8b0eb274a69ca56b2ac1261cb
|
[
"MIT"
] | 1
|
2022-03-09T20:56:56.000Z
|
2022-03-09T21:57:04.000Z
|
testing/lowercasetf.py
|
worldwalker2000/pyxx
|
8c6f129042241ca8b0eb274a69ca56b2ac1261cb
|
[
"MIT"
] | null | null | null |
# Evaluate two always-true conditions and report each outcome.
# Output is identical to the original pair of if/else statements:
# prints True twice.
for flag in (True, not False):
    if flag:
        print(True)
    else:
        print(False)
| 7.285714
| 14
| 0.607843
| 15
| 102
| 4.133333
| 0.4
| 0.290323
| 0.419355
| 0.580645
| 0.741935
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.27451
| 102
| 13
| 15
| 7.846154
| 0.837838
| 0
| 0
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
bbbee393699b2f4dcad3236d5390c10627aa55c5
| 155
|
py
|
Python
|
ganadores/admin.py
|
mmzepedab/pepsi
|
4d4d8e70a65a1a9e70f71221fa6c8a01d0422523
|
[
"MIT"
] | null | null | null |
ganadores/admin.py
|
mmzepedab/pepsi
|
4d4d8e70a65a1a9e70f71221fa6c8a01d0422523
|
[
"MIT"
] | null | null | null |
ganadores/admin.py
|
mmzepedab/pepsi
|
4d4d8e70a65a1a9e70f71221fa6c8a01d0422523
|
[
"MIT"
] | null | null | null |
from django.contrib import admin

from .models import Ganador

# Register your models here.
# Fix: ``from django.contrib import admin`` was imported twice; the
# redundant second import has been removed.
# Expose the Ganador model in the Django admin interface with the
# default ModelAdmin options.
admin.site.register(Ganador)
| 17.222222
| 32
| 0.806452
| 22
| 155
| 5.681818
| 0.5
| 0.16
| 0.272
| 0.368
| 0.448
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135484
| 155
| 8
| 33
| 19.375
| 0.932836
| 0.167742
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
bbe1b1df8ebdee8d8a760ab91d44562a35326667
| 167
|
py
|
Python
|
ibsng/handler/log_console/get_console_buffer.py
|
ParspooyeshFanavar/pyibsng
|
d48bcf4f25e3f23461528bf0ff8870cc3d537444
|
[
"MIT"
] | 6
|
2018-03-06T10:16:36.000Z
|
2021-12-05T12:43:10.000Z
|
ibsng/handler/log_console/get_console_buffer.py
|
ParspooyeshFanavar/pyibsng
|
d48bcf4f25e3f23461528bf0ff8870cc3d537444
|
[
"MIT"
] | 3
|
2018-03-06T10:27:08.000Z
|
2022-01-02T15:21:27.000Z
|
ibsng/handler/log_console/get_console_buffer.py
|
ParspooyeshFanavar/pyibsng
|
d48bcf4f25e3f23461528bf0ff8870cc3d537444
|
[
"MIT"
] | 3
|
2018-01-06T16:28:31.000Z
|
2018-09-17T19:47:19.000Z
|
"""Get console buffer API method."""
from ibsng.handler.handler import Handler
class getConsoleBuffer(Handler):
    """Get console buffer method class.

    Declarative IBSng API handler: all behavior is inherited from
    ``Handler``; this subclass only supplies the method name.
    """
| 18.555556
| 42
| 0.718563
| 20
| 167
| 6
| 0.6
| 0.166667
| 0.266667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.167665
| 167
| 8
| 43
| 20.875
| 0.863309
| 0.377246
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
bbe82ef09197ab0f4344d417ad7d8beb53b81071
| 512
|
py
|
Python
|
mercury/utils/initiate_layer.py
|
ludius0/Mercury
|
19831025a7325c59d77e9d430df4fd9167d36846
|
[
"MIT"
] | null | null | null |
mercury/utils/initiate_layer.py
|
ludius0/Mercury
|
19831025a7325c59d77e9d430df4fd9167d36846
|
[
"MIT"
] | null | null | null |
mercury/utils/initiate_layer.py
|
ludius0/Mercury
|
19831025a7325c59d77e9d430df4fd9167d36846
|
[
"MIT"
] | null | null | null |
import numpy as np
def _uniform(a, b, dtype=np.float32):
return np.random.uniform(-1., 1., size=(a, b)).astype(dtype) \
/ np.sqrt(a*b)
def _gaussian(a, b, dtype=np.float32):
return np.random.randn(a, b).astype(dtype) \
/ np.sqrt(a * b)
def _xavier(a, b, dtype=np.float32):
return np.random.uniform(-1., 1.).astype(dtype)\
* np.sqrt(6./(a + b))
def _kaiming(a, b, dtype=np.float32):
    # Kaiming-style init: N(0, 1) samples of shape (a, b), rescaled.
    # NOTE(review): ``2./a*b`` parses as ``(2./a) * b``, not ``2./(a*b)``.
    # The sibling initializers (_uniform, _gaussian) scale by sqrt(a*b),
    # so this looks like an operator-precedence bug — but the intended
    # formula is not determinable from here; confirm before changing.
    return np.random.randn(a, b).astype(dtype) \
        * np.sqrt(2./a*b)
| 30.117647
| 67
| 0.576172
| 85
| 512
| 3.423529
| 0.258824
| 0.075601
| 0.09622
| 0.123711
| 0.738832
| 0.738832
| 0.738832
| 0.738832
| 0.738832
| 0.639175
| 0
| 0.035354
| 0.226563
| 512
| 17
| 68
| 30.117647
| 0.699495
| 0
| 0
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.307692
| false
| 0
| 0.076923
| 0.307692
| 0.692308
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
a562e58cceb30efb58cadc31ee0616a5de44481c
| 98,994
|
py
|
Python
|
angr/procedures/definitions/win32_resutils.py
|
r4b3rt/angr
|
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
|
[
"BSD-2-Clause"
] | null | null | null |
angr/procedures/definitions/win32_resutils.py
|
r4b3rt/angr
|
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
|
[
"BSD-2-Clause"
] | null | null | null |
angr/procedures/definitions/win32_resutils.py
|
r4b3rt/angr
|
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
|
[
"BSD-2-Clause"
] | null | null | null |
# pylint:disable=line-too-long
import logging
from ...sim_type import SimTypeFunction, SimTypeShort, SimTypeInt, SimTypeLong, SimTypeLongLong, SimTypeDouble, SimTypeFloat, SimTypePointer, SimTypeChar, SimStruct, SimTypeFixedSizeArray, SimTypeBottom, SimUnion, SimTypeBool
from ...calling_conventions import SimCCStdcall, SimCCMicrosoftAMD64
from .. import SIM_PROCEDURES as P
from . import SimLibrary
# Module-level logger for this auto-generated SimLibrary definition file.
_l = logging.getLogger(name=__name__)

# SimLibrary modeling the exports of Windows' resutils.dll (cluster
# resource-utility API); the `prototypes` table below is attached to it.
lib = SimLibrary()
# Win32 calling conventions: stdcall on 32-bit x86, the Microsoft x64
# convention on AMD64.
lib.set_default_cc('X86', SimCCStdcall)
lib.set_default_cc('AMD64', SimCCMicrosoftAMD64)
lib.set_library_names("resutils.dll")
prototypes = \
{
#
'InitializeClusterHealthFault': SimTypeFunction([SimTypePointer(SimStruct({"Id": SimTypePointer(SimTypeChar(label="Char"), offset=0), "ErrorType": SimTypeInt(signed=False, label="UInt32"), "ErrorCode": SimTypeInt(signed=False, label="UInt32"), "Description": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Provider": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Flags": SimTypeInt(signed=False, label="UInt32"), "Reserved": SimTypeInt(signed=False, label="UInt32")}, name="CLUSTER_HEALTH_FAULT", pack=False, align=None), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["clusterHealthFault"]),
#
'InitializeClusterHealthFaultArray': SimTypeFunction([SimTypePointer(SimStruct({"numFaults": SimTypeInt(signed=False, label="UInt32"), "faults": SimTypePointer(SimStruct({"Id": SimTypePointer(SimTypeChar(label="Char"), offset=0), "ErrorType": SimTypeInt(signed=False, label="UInt32"), "ErrorCode": SimTypeInt(signed=False, label="UInt32"), "Description": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Provider": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Flags": SimTypeInt(signed=False, label="UInt32"), "Reserved": SimTypeInt(signed=False, label="UInt32")}, name="CLUSTER_HEALTH_FAULT", pack=False, align=None), offset=0)}, name="CLUSTER_HEALTH_FAULT_ARRAY", pack=False, align=None), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["clusterHealthFaultArray"]),
#
'FreeClusterHealthFault': SimTypeFunction([SimTypePointer(SimStruct({"Id": SimTypePointer(SimTypeChar(label="Char"), offset=0), "ErrorType": SimTypeInt(signed=False, label="UInt32"), "ErrorCode": SimTypeInt(signed=False, label="UInt32"), "Description": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Provider": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Flags": SimTypeInt(signed=False, label="UInt32"), "Reserved": SimTypeInt(signed=False, label="UInt32")}, name="CLUSTER_HEALTH_FAULT", pack=False, align=None), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["clusterHealthFault"]),
#
'FreeClusterHealthFaultArray': SimTypeFunction([SimTypePointer(SimStruct({"numFaults": SimTypeInt(signed=False, label="UInt32"), "faults": SimTypePointer(SimStruct({"Id": SimTypePointer(SimTypeChar(label="Char"), offset=0), "ErrorType": SimTypeInt(signed=False, label="UInt32"), "ErrorCode": SimTypeInt(signed=False, label="UInt32"), "Description": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Provider": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Flags": SimTypeInt(signed=False, label="UInt32"), "Reserved": SimTypeInt(signed=False, label="UInt32")}, name="CLUSTER_HEALTH_FAULT", pack=False, align=None), offset=0)}, name="CLUSTER_HEALTH_FAULT_ARRAY", pack=False, align=None), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["clusterHealthFaultArray"]),
#
'ClusGetClusterHealthFaults': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HCLUSTER", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"numFaults": SimTypeInt(signed=False, label="UInt32"), "faults": SimTypePointer(SimStruct({"Id": SimTypePointer(SimTypeChar(label="Char"), offset=0), "ErrorType": SimTypeInt(signed=False, label="UInt32"), "ErrorCode": SimTypeInt(signed=False, label="UInt32"), "Description": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Provider": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Flags": SimTypeInt(signed=False, label="UInt32"), "Reserved": SimTypeInt(signed=False, label="UInt32")}, name="CLUSTER_HEALTH_FAULT", pack=False, align=None), offset=0)}, name="CLUSTER_HEALTH_FAULT_ARRAY", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hCluster", "objects", "flags"]),
#
'ClusRemoveClusterHealthFault': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HCLUSTER", pack=False, align=None), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hCluster", "id", "flags"]),
#
'ClusAddClusterHealthFault': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HCLUSTER", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"Id": SimTypePointer(SimTypeChar(label="Char"), offset=0), "ErrorType": SimTypeInt(signed=False, label="UInt32"), "ErrorCode": SimTypeInt(signed=False, label="UInt32"), "Description": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Provider": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Flags": SimTypeInt(signed=False, label="UInt32"), "Reserved": SimTypeInt(signed=False, label="UInt32")}, name="CLUSTER_HEALTH_FAULT", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hCluster", "failure", "param2"]),
#
'ResUtilStartResourceService': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pszServiceName", "phServiceHandle"]),
#
'ResUtilVerifyResourceService': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pszServiceName"]),
#
'ResUtilStopResourceService': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pszServiceName"]),
#
'ResUtilVerifyService': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hServiceHandle"]),
#
'ResUtilStopService': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hServiceHandle"]),
#
'ResUtilCreateDirectoryTree': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pszPath"]),
#
'ResUtilIsPathValid': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pszPath"]),
#
'ResUtilEnumProperties': SimTypeFunction([SimTypePointer(SimStruct({"Name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "KeyName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Format": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimUnion({"DefaultPtr": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Default": SimTypeInt(signed=False, label="UInt32"), "lpDefault": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "LargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="LARGE_INTEGER"), "Minimum": SimTypeBottom(label="LARGE_INTEGER"), "Maximum": SimTypeBottom(label="LARGE_INTEGER")}, name="RESUTIL_LARGEINT_DATA", pack=False, align=None), offset=0), "ULargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="ULARGE_INTEGER"), "Minimum": SimTypeBottom(label="ULARGE_INTEGER"), "Maximum": SimTypeBottom(label="ULARGE_INTEGER")}, name="RESUTIL_ULARGEINT_DATA", pack=False, align=None), offset=0), "FileTimeData": SimTypePointer(SimStruct({"Default": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Minimum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Maximum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None)}, name="RESUTIL_FILETIME_DATA", pack=False, align=None), offset=0)}, name="<anon>", label="None"), "Minimum": SimTypeInt(signed=False, label="UInt32"), "Maximum": SimTypeInt(signed=False, label="UInt32"), "Flags": SimTypeInt(signed=False, label="UInt32"), "Offset": SimTypeInt(signed=False, label="UInt32")}, name="RESUTIL_PROPERTY_ITEM", pack=False, align=None), offset=0), 
SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pPropertyTable", "pszOutProperties", "cbOutPropertiesSize", "pcbBytesReturned", "pcbRequired"]),
#
'ResUtilEnumPrivateProperties': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hkeyClusterKey", "pszOutProperties", "cbOutPropertiesSize", "pcbBytesReturned", "pcbRequired"]),
#
'ResUtilGetProperties': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"Name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "KeyName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Format": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimUnion({"DefaultPtr": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Default": SimTypeInt(signed=False, label="UInt32"), "lpDefault": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "LargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="LARGE_INTEGER"), "Minimum": SimTypeBottom(label="LARGE_INTEGER"), "Maximum": SimTypeBottom(label="LARGE_INTEGER")}, name="RESUTIL_LARGEINT_DATA", pack=False, align=None), offset=0), "ULargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="ULARGE_INTEGER"), "Minimum": SimTypeBottom(label="ULARGE_INTEGER"), "Maximum": SimTypeBottom(label="ULARGE_INTEGER")}, name="RESUTIL_ULARGEINT_DATA", pack=False, align=None), offset=0), "FileTimeData": SimTypePointer(SimStruct({"Default": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Minimum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Maximum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None)}, name="RESUTIL_FILETIME_DATA", pack=False, align=None), offset=0)}, name="<anon>", label="None"), "Minimum": SimTypeInt(signed=False, label="UInt32"), "Maximum": SimTypeInt(signed=False, label="UInt32"), "Flags": SimTypeInt(signed=False, label="UInt32"), "Offset": SimTypeInt(signed=False, label="UInt32")}, 
name="RESUTIL_PROPERTY_ITEM", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hkeyClusterKey", "pPropertyTable", "pOutPropertyList", "cbOutPropertyListSize", "pcbBytesReturned", "pcbRequired"]),
#
'ResUtilGetAllProperties': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"Name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "KeyName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Format": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimUnion({"DefaultPtr": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Default": SimTypeInt(signed=False, label="UInt32"), "lpDefault": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "LargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="LARGE_INTEGER"), "Minimum": SimTypeBottom(label="LARGE_INTEGER"), "Maximum": SimTypeBottom(label="LARGE_INTEGER")}, name="RESUTIL_LARGEINT_DATA", pack=False, align=None), offset=0), "ULargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="ULARGE_INTEGER"), "Minimum": SimTypeBottom(label="ULARGE_INTEGER"), "Maximum": SimTypeBottom(label="ULARGE_INTEGER")}, name="RESUTIL_ULARGEINT_DATA", pack=False, align=None), offset=0), "FileTimeData": SimTypePointer(SimStruct({"Default": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Minimum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Maximum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None)}, name="RESUTIL_FILETIME_DATA", pack=False, align=None), offset=0)}, name="<anon>", label="None"), "Minimum": SimTypeInt(signed=False, label="UInt32"), "Maximum": SimTypeInt(signed=False, label="UInt32"), "Flags": SimTypeInt(signed=False, label="UInt32"), "Offset": SimTypeInt(signed=False, label="UInt32")}, 
name="RESUTIL_PROPERTY_ITEM", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hkeyClusterKey", "pPropertyTable", "pOutPropertyList", "cbOutPropertyListSize", "pcbBytesReturned", "pcbRequired"]),
#
'ResUtilGetPrivateProperties': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hkeyClusterKey", "pOutPropertyList", "cbOutPropertyListSize", "pcbBytesReturned", "pcbRequired"]),
#
'ResUtilGetPropertySize': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"Name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "KeyName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Format": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimUnion({"DefaultPtr": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Default": SimTypeInt(signed=False, label="UInt32"), "lpDefault": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "LargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="LARGE_INTEGER"), "Minimum": SimTypeBottom(label="LARGE_INTEGER"), "Maximum": SimTypeBottom(label="LARGE_INTEGER")}, name="RESUTIL_LARGEINT_DATA", pack=False, align=None), offset=0), "ULargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="ULARGE_INTEGER"), "Minimum": SimTypeBottom(label="ULARGE_INTEGER"), "Maximum": SimTypeBottom(label="ULARGE_INTEGER")}, name="RESUTIL_ULARGEINT_DATA", pack=False, align=None), offset=0), "FileTimeData": SimTypePointer(SimStruct({"Default": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Minimum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Maximum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None)}, name="RESUTIL_FILETIME_DATA", pack=False, align=None), offset=0)}, name="<anon>", label="None"), "Minimum": SimTypeInt(signed=False, label="UInt32"), "Maximum": SimTypeInt(signed=False, label="UInt32"), "Flags": SimTypeInt(signed=False, label="UInt32"), "Offset": SimTypeInt(signed=False, label="UInt32")}, 
name="RESUTIL_PROPERTY_ITEM", pack=False, align=None), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hkeyClusterKey", "pPropertyTableItem", "pcbOutPropertyListSize", "pnPropertyCount"]),
#
'ResUtilGetProperty': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"Name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "KeyName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Format": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimUnion({"DefaultPtr": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Default": SimTypeInt(signed=False, label="UInt32"), "lpDefault": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "LargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="LARGE_INTEGER"), "Minimum": SimTypeBottom(label="LARGE_INTEGER"), "Maximum": SimTypeBottom(label="LARGE_INTEGER")}, name="RESUTIL_LARGEINT_DATA", pack=False, align=None), offset=0), "ULargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="ULARGE_INTEGER"), "Minimum": SimTypeBottom(label="ULARGE_INTEGER"), "Maximum": SimTypeBottom(label="ULARGE_INTEGER")}, name="RESUTIL_ULARGEINT_DATA", pack=False, align=None), offset=0), "FileTimeData": SimTypePointer(SimStruct({"Default": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Minimum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Maximum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None)}, name="RESUTIL_FILETIME_DATA", pack=False, align=None), offset=0)}, name="<anon>", label="None"), "Minimum": SimTypeInt(signed=False, label="UInt32"), "Maximum": SimTypeInt(signed=False, label="UInt32"), "Flags": SimTypeInt(signed=False, label="UInt32"), "Offset": SimTypeInt(signed=False, label="UInt32")}, 
name="RESUTIL_PROPERTY_ITEM", pack=False, align=None), offset=0), SimTypePointer(SimTypePointer(SimTypeBottom(label="Void"), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hkeyClusterKey", "pPropertyTableItem", "pOutPropertyItem", "pcbOutPropertyItemSize"]),
#
'ResUtilVerifyPropertyTable': SimTypeFunction([SimTypePointer(SimStruct({"Name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "KeyName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Format": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimUnion({"DefaultPtr": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Default": SimTypeInt(signed=False, label="UInt32"), "lpDefault": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "LargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="LARGE_INTEGER"), "Minimum": SimTypeBottom(label="LARGE_INTEGER"), "Maximum": SimTypeBottom(label="LARGE_INTEGER")}, name="RESUTIL_LARGEINT_DATA", pack=False, align=None), offset=0), "ULargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="ULARGE_INTEGER"), "Minimum": SimTypeBottom(label="ULARGE_INTEGER"), "Maximum": SimTypeBottom(label="ULARGE_INTEGER")}, name="RESUTIL_ULARGEINT_DATA", pack=False, align=None), offset=0), "FileTimeData": SimTypePointer(SimStruct({"Default": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Minimum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Maximum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None)}, name="RESUTIL_FILETIME_DATA", pack=False, align=None), offset=0)}, name="<anon>", label="None"), "Minimum": SimTypeInt(signed=False, label="UInt32"), "Maximum": SimTypeInt(signed=False, label="UInt32"), "Flags": SimTypeInt(signed=False, label="UInt32"), "Offset": SimTypeInt(signed=False, label="UInt32")}, name="RESUTIL_PROPERTY_ITEM", pack=False, align=None), offset=0), 
SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=True, label="Int32"), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Byte"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pPropertyTable", "Reserved", "bAllowUnknownProperties", "pInPropertyList", "cbInPropertyListSize", "pOutParams"]),
#
'ResUtilSetPropertyTable': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"Name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "KeyName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Format": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimUnion({"DefaultPtr": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Default": SimTypeInt(signed=False, label="UInt32"), "lpDefault": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "LargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="LARGE_INTEGER"), "Minimum": SimTypeBottom(label="LARGE_INTEGER"), "Maximum": SimTypeBottom(label="LARGE_INTEGER")}, name="RESUTIL_LARGEINT_DATA", pack=False, align=None), offset=0), "ULargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="ULARGE_INTEGER"), "Minimum": SimTypeBottom(label="ULARGE_INTEGER"), "Maximum": SimTypeBottom(label="ULARGE_INTEGER")}, name="RESUTIL_ULARGEINT_DATA", pack=False, align=None), offset=0), "FileTimeData": SimTypePointer(SimStruct({"Default": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Minimum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Maximum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None)}, name="RESUTIL_FILETIME_DATA", pack=False, align=None), offset=0)}, name="<anon>", label="None"), "Minimum": SimTypeInt(signed=False, label="UInt32"), "Maximum": SimTypeInt(signed=False, label="UInt32"), "Flags": SimTypeInt(signed=False, label="UInt32"), "Offset": SimTypeInt(signed=False, label="UInt32")}, 
name="RESUTIL_PROPERTY_ITEM", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=True, label="Int32"), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Byte"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hkeyClusterKey", "pPropertyTable", "Reserved", "bAllowUnknownProperties", "pInPropertyList", "cbInPropertyListSize", "pOutParams"]),
#
'ResUtilSetPropertyTableEx': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"Name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "KeyName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Format": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimUnion({"DefaultPtr": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Default": SimTypeInt(signed=False, label="UInt32"), "lpDefault": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "LargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="LARGE_INTEGER"), "Minimum": SimTypeBottom(label="LARGE_INTEGER"), "Maximum": SimTypeBottom(label="LARGE_INTEGER")}, name="RESUTIL_LARGEINT_DATA", pack=False, align=None), offset=0), "ULargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="ULARGE_INTEGER"), "Minimum": SimTypeBottom(label="ULARGE_INTEGER"), "Maximum": SimTypeBottom(label="ULARGE_INTEGER")}, name="RESUTIL_ULARGEINT_DATA", pack=False, align=None), offset=0), "FileTimeData": SimTypePointer(SimStruct({"Default": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Minimum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Maximum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None)}, name="RESUTIL_FILETIME_DATA", pack=False, align=None), offset=0)}, name="<anon>", label="None"), "Minimum": SimTypeInt(signed=False, label="UInt32"), "Maximum": SimTypeInt(signed=False, label="UInt32"), "Flags": SimTypeInt(signed=False, label="UInt32"), "Offset": SimTypeInt(signed=False, label="UInt32")}, 
name="RESUTIL_PROPERTY_ITEM", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=True, label="Int32"), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=True, label="Int32"), SimTypePointer(SimTypeChar(label="Byte"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hkeyClusterKey", "pPropertyTable", "Reserved", "bAllowUnknownProperties", "pInPropertyList", "cbInPropertyListSize", "bForceWrite", "pOutParams"]),
#
'ResUtilSetPropertyParameterBlock': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"Name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "KeyName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Format": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimUnion({"DefaultPtr": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Default": SimTypeInt(signed=False, label="UInt32"), "lpDefault": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "LargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="LARGE_INTEGER"), "Minimum": SimTypeBottom(label="LARGE_INTEGER"), "Maximum": SimTypeBottom(label="LARGE_INTEGER")}, name="RESUTIL_LARGEINT_DATA", pack=False, align=None), offset=0), "ULargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="ULARGE_INTEGER"), "Minimum": SimTypeBottom(label="ULARGE_INTEGER"), "Maximum": SimTypeBottom(label="ULARGE_INTEGER")}, name="RESUTIL_ULARGEINT_DATA", pack=False, align=None), offset=0), "FileTimeData": SimTypePointer(SimStruct({"Default": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Minimum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Maximum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None)}, name="RESUTIL_FILETIME_DATA", pack=False, align=None), offset=0)}, name="<anon>", label="None"), "Minimum": SimTypeInt(signed=False, label="UInt32"), "Maximum": SimTypeInt(signed=False, label="UInt32"), "Flags": SimTypeInt(signed=False, label="UInt32"), "Offset": SimTypeInt(signed=False, label="UInt32")}, 
name="RESUTIL_PROPERTY_ITEM", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Byte"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hkeyClusterKey", "pPropertyTable", "Reserved", "pInParams", "pInPropertyList", "cbInPropertyListSize", "pOutParams"]),
#
'ResUtilSetPropertyParameterBlockEx': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"Name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "KeyName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Format": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimUnion({"DefaultPtr": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Default": SimTypeInt(signed=False, label="UInt32"), "lpDefault": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "LargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="LARGE_INTEGER"), "Minimum": SimTypeBottom(label="LARGE_INTEGER"), "Maximum": SimTypeBottom(label="LARGE_INTEGER")}, name="RESUTIL_LARGEINT_DATA", pack=False, align=None), offset=0), "ULargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="ULARGE_INTEGER"), "Minimum": SimTypeBottom(label="ULARGE_INTEGER"), "Maximum": SimTypeBottom(label="ULARGE_INTEGER")}, name="RESUTIL_ULARGEINT_DATA", pack=False, align=None), offset=0), "FileTimeData": SimTypePointer(SimStruct({"Default": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Minimum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Maximum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None)}, name="RESUTIL_FILETIME_DATA", pack=False, align=None), offset=0)}, name="<anon>", label="None"), "Minimum": SimTypeInt(signed=False, label="UInt32"), "Maximum": SimTypeInt(signed=False, label="UInt32"), "Flags": SimTypeInt(signed=False, label="UInt32"), "Offset": SimTypeInt(signed=False, label="UInt32")}, 
name="RESUTIL_PROPERTY_ITEM", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=True, label="Int32"), SimTypePointer(SimTypeChar(label="Byte"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hkeyClusterKey", "pPropertyTable", "Reserved", "pInParams", "pInPropertyList", "cbInPropertyListSize", "bForceWrite", "pOutParams"]),
#
'ResUtilSetUnknownProperties': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"Name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "KeyName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Format": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimUnion({"DefaultPtr": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Default": SimTypeInt(signed=False, label="UInt32"), "lpDefault": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "LargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="LARGE_INTEGER"), "Minimum": SimTypeBottom(label="LARGE_INTEGER"), "Maximum": SimTypeBottom(label="LARGE_INTEGER")}, name="RESUTIL_LARGEINT_DATA", pack=False, align=None), offset=0), "ULargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="ULARGE_INTEGER"), "Minimum": SimTypeBottom(label="ULARGE_INTEGER"), "Maximum": SimTypeBottom(label="ULARGE_INTEGER")}, name="RESUTIL_ULARGEINT_DATA", pack=False, align=None), offset=0), "FileTimeData": SimTypePointer(SimStruct({"Default": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Minimum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Maximum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None)}, name="RESUTIL_FILETIME_DATA", pack=False, align=None), offset=0)}, name="<anon>", label="None"), "Minimum": SimTypeInt(signed=False, label="UInt32"), "Maximum": SimTypeInt(signed=False, label="UInt32"), "Flags": SimTypeInt(signed=False, label="UInt32"), "Offset": SimTypeInt(signed=False, label="UInt32")}, 
name="RESUTIL_PROPERTY_ITEM", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hkeyClusterKey", "pPropertyTable", "pInPropertyList", "cbInPropertyListSize"]),
#
'ResUtilGetPropertiesToParameterBlock': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"Name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "KeyName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Format": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimUnion({"DefaultPtr": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Default": SimTypeInt(signed=False, label="UInt32"), "lpDefault": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "LargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="LARGE_INTEGER"), "Minimum": SimTypeBottom(label="LARGE_INTEGER"), "Maximum": SimTypeBottom(label="LARGE_INTEGER")}, name="RESUTIL_LARGEINT_DATA", pack=False, align=None), offset=0), "ULargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="ULARGE_INTEGER"), "Minimum": SimTypeBottom(label="ULARGE_INTEGER"), "Maximum": SimTypeBottom(label="ULARGE_INTEGER")}, name="RESUTIL_ULARGEINT_DATA", pack=False, align=None), offset=0), "FileTimeData": SimTypePointer(SimStruct({"Default": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Minimum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Maximum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None)}, name="RESUTIL_FILETIME_DATA", pack=False, align=None), offset=0)}, name="<anon>", label="None"), "Minimum": SimTypeInt(signed=False, label="UInt32"), "Maximum": SimTypeInt(signed=False, label="UInt32"), "Flags": SimTypeInt(signed=False, label="UInt32"), "Offset": SimTypeInt(signed=False, label="UInt32")}, 
name="RESUTIL_PROPERTY_ITEM", pack=False, align=None), offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=True, label="Int32"), SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hkeyClusterKey", "pPropertyTable", "pOutParams", "bCheckForRequiredProperties", "pszNameOfPropInError"]),
#
'ResUtilPropertyListFromParameterBlock': SimTypeFunction([SimTypePointer(SimStruct({"Name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "KeyName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Format": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimUnion({"DefaultPtr": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Default": SimTypeInt(signed=False, label="UInt32"), "lpDefault": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "LargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="LARGE_INTEGER"), "Minimum": SimTypeBottom(label="LARGE_INTEGER"), "Maximum": SimTypeBottom(label="LARGE_INTEGER")}, name="RESUTIL_LARGEINT_DATA", pack=False, align=None), offset=0), "ULargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="ULARGE_INTEGER"), "Minimum": SimTypeBottom(label="ULARGE_INTEGER"), "Maximum": SimTypeBottom(label="ULARGE_INTEGER")}, name="RESUTIL_ULARGEINT_DATA", pack=False, align=None), offset=0), "FileTimeData": SimTypePointer(SimStruct({"Default": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Minimum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Maximum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None)}, name="RESUTIL_FILETIME_DATA", pack=False, align=None), offset=0)}, name="<anon>", label="None"), "Minimum": SimTypeInt(signed=False, label="UInt32"), "Maximum": SimTypeInt(signed=False, label="UInt32"), "Flags": SimTypeInt(signed=False, label="UInt32"), "Offset": SimTypeInt(signed=False, label="UInt32")}, name="RESUTIL_PROPERTY_ITEM", pack=False, align=None), offset=0), 
SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pPropertyTable", "pOutPropertyList", "pcbOutPropertyListSize", "pInParams", "pcbBytesReturned", "pcbRequired"]),
#
'ResUtilDupParameterBlock': SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimStruct({"Name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "KeyName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Format": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimUnion({"DefaultPtr": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Default": SimTypeInt(signed=False, label="UInt32"), "lpDefault": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "LargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="LARGE_INTEGER"), "Minimum": SimTypeBottom(label="LARGE_INTEGER"), "Maximum": SimTypeBottom(label="LARGE_INTEGER")}, name="RESUTIL_LARGEINT_DATA", pack=False, align=None), offset=0), "ULargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="ULARGE_INTEGER"), "Minimum": SimTypeBottom(label="ULARGE_INTEGER"), "Maximum": SimTypeBottom(label="ULARGE_INTEGER")}, name="RESUTIL_ULARGEINT_DATA", pack=False, align=None), offset=0), "FileTimeData": SimTypePointer(SimStruct({"Default": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Minimum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Maximum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None)}, name="RESUTIL_FILETIME_DATA", pack=False, align=None), offset=0)}, name="<anon>", label="None"), "Minimum": SimTypeInt(signed=False, label="UInt32"), "Maximum": SimTypeInt(signed=False, label="UInt32"), "Flags": SimTypeInt(signed=False, label="UInt32"), "Offset": SimTypeInt(signed=False, 
label="UInt32")}, name="RESUTIL_PROPERTY_ITEM", pack=False, align=None), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pOutParams", "pInParams", "pPropertyTable"]),
#
'ResUtilFreeParameterBlock': SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimStruct({"Name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "KeyName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Format": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimUnion({"DefaultPtr": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Default": SimTypeInt(signed=False, label="UInt32"), "lpDefault": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "LargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="LARGE_INTEGER"), "Minimum": SimTypeBottom(label="LARGE_INTEGER"), "Maximum": SimTypeBottom(label="LARGE_INTEGER")}, name="RESUTIL_LARGEINT_DATA", pack=False, align=None), offset=0), "ULargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="ULARGE_INTEGER"), "Minimum": SimTypeBottom(label="ULARGE_INTEGER"), "Maximum": SimTypeBottom(label="ULARGE_INTEGER")}, name="RESUTIL_ULARGEINT_DATA", pack=False, align=None), offset=0), "FileTimeData": SimTypePointer(SimStruct({"Default": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Minimum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Maximum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None)}, name="RESUTIL_FILETIME_DATA", pack=False, align=None), offset=0)}, name="<anon>", label="None"), "Minimum": SimTypeInt(signed=False, label="UInt32"), "Maximum": SimTypeInt(signed=False, label="UInt32"), "Flags": SimTypeInt(signed=False, label="UInt32"), "Offset": SimTypeInt(signed=False, 
label="UInt32")}, name="RESUTIL_PROPERTY_ITEM", pack=False, align=None), offset=0)], SimTypeBottom(label="Void"), arg_names=["pOutParams", "pInParams", "pPropertyTable"]),
#
'ResUtilAddUnknownProperties': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"Name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "KeyName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Format": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimUnion({"DefaultPtr": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Default": SimTypeInt(signed=False, label="UInt32"), "lpDefault": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "LargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="LARGE_INTEGER"), "Minimum": SimTypeBottom(label="LARGE_INTEGER"), "Maximum": SimTypeBottom(label="LARGE_INTEGER")}, name="RESUTIL_LARGEINT_DATA", pack=False, align=None), offset=0), "ULargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="ULARGE_INTEGER"), "Minimum": SimTypeBottom(label="ULARGE_INTEGER"), "Maximum": SimTypeBottom(label="ULARGE_INTEGER")}, name="RESUTIL_ULARGEINT_DATA", pack=False, align=None), offset=0), "FileTimeData": SimTypePointer(SimStruct({"Default": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Minimum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Maximum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None)}, name="RESUTIL_FILETIME_DATA", pack=False, align=None), offset=0)}, name="<anon>", label="None"), "Minimum": SimTypeInt(signed=False, label="UInt32"), "Maximum": SimTypeInt(signed=False, label="UInt32"), "Flags": SimTypeInt(signed=False, label="UInt32"), "Offset": SimTypeInt(signed=False, label="UInt32")}, 
name="RESUTIL_PROPERTY_ITEM", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hkeyClusterKey", "pPropertyTable", "pOutPropertyList", "pcbOutPropertyListSize", "pcbBytesReturned", "pcbRequired"]),
#
'ResUtilSetPrivatePropertyList': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hkeyClusterKey", "pInPropertyList", "cbInPropertyListSize"]),
#
'ResUtilVerifyPrivatePropertyList': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["pInPropertyList", "cbInPropertyListSize"]),
#
'ResUtilDupString': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypePointer(SimTypeChar(label="Char"), offset=0), arg_names=["pszInString"]),
#
'ResUtilGetBinaryValue': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypePointer(SimTypeChar(label="Byte"), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hkeyClusterKey", "pszValueName", "ppbOutValue", "pcbOutValueSize"]),
#
'ResUtilGetSzValue': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypePointer(SimTypeChar(label="Char"), offset=0), arg_names=["hkeyClusterKey", "pszValueName"]),
#
'ResUtilGetDwordValue': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hkeyClusterKey", "pszValueName", "pdwOutValue", "dwDefaultValue"]),
#
'ResUtilGetQwordValue': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeLongLong(signed=False, label="UInt64"), offset=0), SimTypeLongLong(signed=False, label="UInt64")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hkeyClusterKey", "pszValueName", "pqwOutValue", "qwDefaultValue"]),
#
'ResUtilSetBinaryValue': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypePointer(SimTypeChar(label="Byte"), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hkeyClusterKey", "pszValueName", "pbNewValue", "cbNewValueSize", "ppbOutValue", "pcbOutValueSize"]),
#
'ResUtilSetSzValue': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hkeyClusterKey", "pszValueName", "pszNewValue", "ppszOutString"]),
#
'ResUtilSetExpandSzValue': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hkeyClusterKey", "pszValueName", "pszNewValue", "ppszOutString"]),
#
'ResUtilSetMultiSzValue': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hkeyClusterKey", "pszValueName", "pszNewValue", "cbNewValueSize", "ppszOutValue", "pcbOutValueSize"]),
#
'ResUtilSetDwordValue': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hkeyClusterKey", "pszValueName", "dwNewValue", "pdwOutValue"]),
#
'ResUtilSetQwordValue': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeLongLong(signed=False, label="UInt64"), SimTypePointer(SimTypeLongLong(signed=False, label="UInt64"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hkeyClusterKey", "pszValueName", "qwNewValue", "pqwOutValue"]),
#
'ResUtilSetValueEx': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hkeyClusterKey", "valueName", "valueType", "valueData", "valueSize", "flags"]),
#
'ResUtilGetBinaryProperty': SimTypeFunction([SimTypePointer(SimTypePointer(SimTypeChar(label="Byte"), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimStruct({"__AnonymousBase_clusapi_L5092_C41": SimStruct({"Syntax": SimUnion({"dw": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimStruct({"wFormat": SimTypeShort(signed=False, label="UInt16"), "wType": SimTypeShort(signed=False, label="UInt16")}, name="_Anonymous_e__Struct", pack=False, align=None)}, name="<anon>", label="None"), "cbLength": SimTypeInt(signed=False, label="UInt32")}, name="CLUSPROP_VALUE", pack=False, align=None), "rgb": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="CLUSPROP_BINARY", pack=False, align=None), offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypePointer(SimTypeChar(label="Byte"), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["ppbOutValue", "pcbOutValueSize", "pValueStruct", "pbOldValue", "cbOldValueSize", "ppPropertyList", "pcbPropertyListSize"]),
#
'ResUtilGetSzProperty': SimTypeFunction([SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0), SimTypePointer(SimStruct({"__AnonymousBase_clusapi_L5132_C37": SimStruct({"Syntax": SimUnion({"dw": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimStruct({"wFormat": SimTypeShort(signed=False, label="UInt16"), "wType": SimTypeShort(signed=False, label="UInt16")}, name="_Anonymous_e__Struct", pack=False, align=None)}, name="<anon>", label="None"), "cbLength": SimTypeInt(signed=False, label="UInt32")}, name="CLUSPROP_VALUE", pack=False, align=None), "sz": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="CLUSPROP_SZ", pack=False, align=None), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypePointer(SimTypeChar(label="Byte"), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["ppszOutValue", "pValueStruct", "pszOldValue", "ppPropertyList", "pcbPropertyListSize"]),
#
'ResUtilGetMultiSzProperty': SimTypeFunction([SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimStruct({"__AnonymousBase_clusapi_L5132_C37": SimStruct({"Syntax": SimUnion({"dw": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimStruct({"wFormat": SimTypeShort(signed=False, label="UInt16"), "wType": SimTypeShort(signed=False, label="UInt16")}, name="_Anonymous_e__Struct", pack=False, align=None)}, name="<anon>", label="None"), "cbLength": SimTypeInt(signed=False, label="UInt32")}, name="CLUSPROP_VALUE", pack=False, align=None), "sz": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="CLUSPROP_SZ", pack=False, align=None), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypePointer(SimTypeChar(label="Byte"), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["ppszOutValue", "pcbOutValueSize", "pValueStruct", "pszOldValue", "cbOldValueSize", "ppPropertyList", "pcbPropertyListSize"]),
#
'ResUtilGetDwordProperty': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimStruct({"__AnonymousBase_clusapi_L5112_C40": SimStruct({"Syntax": SimUnion({"dw": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimStruct({"wFormat": SimTypeShort(signed=False, label="UInt16"), "wType": SimTypeShort(signed=False, label="UInt16")}, name="_Anonymous_e__Struct", pack=False, align=None)}, name="<anon>", label="None"), "cbLength": SimTypeInt(signed=False, label="UInt32")}, name="CLUSPROP_VALUE", pack=False, align=None), "dw": SimTypeInt(signed=False, label="UInt32")}, name="CLUSPROP_DWORD", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypePointer(SimTypeChar(label="Byte"), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pdwOutValue", "pValueStruct", "dwOldValue", "dwMinimum", "dwMaximum", "ppPropertyList", "pcbPropertyListSize"]),
#
'ResUtilGetLongProperty': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), SimTypePointer(SimStruct({"__AnonymousBase_clusapi_L5122_C39": SimStruct({"Syntax": SimUnion({"dw": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimStruct({"wFormat": SimTypeShort(signed=False, label="UInt16"), "wType": SimTypeShort(signed=False, label="UInt16")}, name="_Anonymous_e__Struct", pack=False, align=None)}, name="<anon>", label="None"), "cbLength": SimTypeInt(signed=False, label="UInt32")}, name="CLUSPROP_VALUE", pack=False, align=None), "l": SimTypeInt(signed=True, label="Int32")}, name="CLUSPROP_LONG", pack=False, align=None), offset=0), SimTypeInt(signed=True, label="Int32"), SimTypeInt(signed=True, label="Int32"), SimTypeInt(signed=True, label="Int32"), SimTypePointer(SimTypePointer(SimTypeChar(label="Byte"), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["plOutValue", "pValueStruct", "lOldValue", "lMinimum", "lMaximum", "ppPropertyList", "pcbPropertyListSize"]),
#
'ResUtilGetFileTimeProperty': SimTypeFunction([SimTypePointer(SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"__AnonymousBase_clusapi_L5188_C14": SimStruct({"Syntax": SimUnion({"dw": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimStruct({"wFormat": SimTypeShort(signed=False, label="UInt16"), "wType": SimTypeShort(signed=False, label="UInt16")}, name="_Anonymous_e__Struct", pack=False, align=None)}, name="<anon>", label="None"), "cbLength": SimTypeInt(signed=False, label="UInt32")}, name="CLUSPROP_VALUE", pack=False, align=None), "ft": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None)}, name="CLUSPROP_FILETIME", pack=False, align=None), offset=0), SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), SimTypePointer(SimTypePointer(SimTypeChar(label="Byte"), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pftOutValue", "pValueStruct", "ftOldValue", "ftMinimum", "ftMaximum", "ppPropertyList", "pcbPropertyListSize"]),
#
'ResUtilGetEnvironmentWithNetName': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0)], SimTypePointer(SimTypeBottom(label="Void"), offset=0), arg_names=["hResource"]),
#
'ResUtilFreeEnvironment': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpEnvironment"]),
#
'ResUtilExpandEnvironmentStrings': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypePointer(SimTypeChar(label="Char"), offset=0), arg_names=["pszSrc"]),
#
'ResUtilSetResourceServiceEnvironment': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="LOG_LEVEL"), SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypeBottom(label="Void"), arg_names=["ResourceHandle", "LogLevel", "FormatString"]), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pszServiceName", "hResource", "pfnLogEvent", "hResourceHandle"]),
#
'ResUtilRemoveResourceServiceEnvironment': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="LOG_LEVEL"), SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypeBottom(label="Void"), arg_names=["ResourceHandle", "LogLevel", "FormatString"]), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pszServiceName", "pfnLogEvent", "hResourceHandle"]),
#
'ResUtilSetResourceServiceStartParameters': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0), SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="LOG_LEVEL"), SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypeBottom(label="Void"), arg_names=["ResourceHandle", "LogLevel", "FormatString"]), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pszServiceName", "schSCMHandle", "phService", "pfnLogEvent", "hResourceHandle"]),
#
'ResUtilFindSzProperty': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pPropertyList", "cbPropertyListSize", "pszPropertyName", "pszPropertyValue"]),
#
'ResUtilFindExpandSzProperty': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pPropertyList", "cbPropertyListSize", "pszPropertyName", "pszPropertyValue"]),
#
'ResUtilFindExpandedSzProperty': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pPropertyList", "cbPropertyListSize", "pszPropertyName", "pszPropertyValue"]),
#
'ResUtilFindDwordProperty': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pPropertyList", "cbPropertyListSize", "pszPropertyName", "pdwPropertyValue"]),
#
'ResUtilFindBinaryProperty': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypePointer(SimTypeChar(label="Byte"), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pPropertyList", "cbPropertyListSize", "pszPropertyName", "pbPropertyValue", "pcbPropertyValueSize"]),
#
'ResUtilFindMultiSzProperty': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pPropertyList", "cbPropertyListSize", "pszPropertyName", "pszPropertyValue", "pcbPropertyValueSize"]),
#
'ResUtilFindLongProperty': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pPropertyList", "cbPropertyListSize", "pszPropertyName", "plPropertyValue"]),
#
'ResUtilFindULargeIntegerProperty': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeLongLong(signed=False, label="UInt64"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pPropertyList", "cbPropertyListSize", "pszPropertyName", "plPropertyValue"]),
#
'ResUtilFindFileTimeProperty': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pPropertyList", "cbPropertyListSize", "pszPropertyName", "pftPropertyValue"]),
#
'ClusWorkerCreate': SimTypeFunction([SimTypePointer(SimStruct({"hThread": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "Terminate": SimTypeInt(signed=True, label="Int32")}, name="CLUS_WORKER", pack=False, align=None), offset=0), SimTypePointer(SimTypeFunction([SimTypePointer(SimStruct({"hThread": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "Terminate": SimTypeInt(signed=True, label="Int32")}, name="CLUS_WORKER", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pWorker", "lpThreadParameter"]), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpWorker", "lpStartAddress", "lpParameter"]),
#
'ClusWorkerCheckTerminate': SimTypeFunction([SimTypePointer(SimStruct({"hThread": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "Terminate": SimTypeInt(signed=True, label="Int32")}, name="CLUS_WORKER", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["lpWorker"]),
#
'ClusWorkerTerminate': SimTypeFunction([SimTypePointer(SimStruct({"hThread": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "Terminate": SimTypeInt(signed=True, label="Int32")}, name="CLUS_WORKER", pack=False, align=None), offset=0)], SimTypeBottom(label="Void"), arg_names=["lpWorker"]),
#
'ClusWorkerTerminateEx': SimTypeFunction([SimTypePointer(SimStruct({"hThread": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "Terminate": SimTypeInt(signed=True, label="Int32")}, name="CLUS_WORKER", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=True, label="Int32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["ClusWorker", "TimeoutInMilliseconds", "WaitOnly"]),
#
'ClusWorkersTerminate': SimTypeFunction([SimTypePointer(SimTypePointer(SimStruct({"hThread": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "Terminate": SimTypeInt(signed=True, label="Int32")}, name="CLUS_WORKER", pack=False, align=None), offset=0), label="LPArray", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=True, label="Int32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["ClusWorkers", "ClusWorkersCount", "TimeoutInMilliseconds", "WaitOnly"]),
#
'ResUtilResourcesEqual': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hSelf", "hResource"]),
#
'ResUtilResourceTypesEqual': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["lpszResourceTypeName", "hResource"]),
#
'ResUtilIsResourceClassEqual': SimTypeFunction([SimTypePointer(SimStruct({"Anonymous": SimUnion({"Anonymous": SimStruct({"Anonymous": SimUnion({"dw": SimTypeInt(signed=False, label="UInt32"), "rc": SimTypeInt(signed=False, label="CLUSTER_RESOURCE_CLASS")}, name="<anon>", label="None"), "SubClass": SimTypeInt(signed=False, label="UInt32")}, name="_Anonymous_e__Struct", pack=False, align=None), "li": SimTypeBottom(label="ULARGE_INTEGER")}, name="<anon>", label="None")}, name="CLUS_RESOURCE_CLASS_INFO", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["prci", "hResource"]),
#
'ResUtilEnumResources': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeFunction([SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["param0", "param1", "param2"]), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hSelf", "lpszResTypeName", "pResCallBack", "pParameter"]),
#
'ResUtilEnumResourcesEx': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HCLUSTER", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeFunction([SimTypePointer(SimStruct({}, name="_HCLUSTER", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["param0", "param1", "param2", "param3"]), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hCluster", "hSelf", "lpszResTypeName", "pResCallBack", "pParameter"]),
#
'ResUtilGetResourceDependency': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), arg_names=["hSelf", "lpszResourceType"]),
#
'ResUtilGetResourceDependencyByName': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HCLUSTER", pack=False, align=None), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=True, label="Int32")], SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), arg_names=["hCluster", "hSelf", "lpszResourceType", "bRecurse"]),
#
'ResUtilGetResourceDependencyByClass': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HCLUSTER", pack=False, align=None), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"Anonymous": SimUnion({"Anonymous": SimStruct({"Anonymous": SimUnion({"dw": SimTypeInt(signed=False, label="UInt32"), "rc": SimTypeInt(signed=False, label="CLUSTER_RESOURCE_CLASS")}, name="<anon>", label="None"), "SubClass": SimTypeInt(signed=False, label="UInt32")}, name="_Anonymous_e__Struct", pack=False, align=None), "li": SimTypeBottom(label="ULARGE_INTEGER")}, name="<anon>", label="None")}, name="CLUS_RESOURCE_CLASS_INFO", pack=False, align=None), offset=0), SimTypeInt(signed=True, label="Int32")], SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), arg_names=["hCluster", "hSelf", "prci", "bRecurse"]),
#
'ResUtilGetResourceNameDependency': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), arg_names=["lpszResourceName", "lpszResourceType"]),
#
'ResUtilGetResourceDependentIPAddressProps': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), SimTypePointer(SimTypeChar(label="Char"), label="LPArray", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeChar(label="Char"), label="LPArray", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeChar(label="Char"), label="LPArray", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hResource", "pszAddress", "pcchAddress", "pszSubnetMask", "pcchSubnetMask", "pszNetwork", "pcchNetwork"]),
#
'ResUtilFindDependentDiskResourceDriveLetter': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HCLUSTER", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), SimTypePointer(SimTypeChar(label="Char"), label="LPArray", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hCluster", "hResource", "pszDriveLetter", "pcchDriveLetter"]),
#
'ResUtilTerminateServiceProcessFromResDll': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=True, label="Int32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="LOG_LEVEL"), SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypeBottom(label="Void"), arg_names=["ResourceHandle", "LogLevel", "FormatString"]), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["dwServicePid", "bOffline", "pdwResourceState", "pfnLogEvent", "hResourceHandle"]),
#
'ResUtilGetPropertyFormats': SimTypeFunction([SimTypePointer(SimStruct({"Name": SimTypePointer(SimTypeChar(label="Char"), offset=0), "KeyName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Format": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimUnion({"DefaultPtr": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "Default": SimTypeInt(signed=False, label="UInt32"), "lpDefault": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "LargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="LARGE_INTEGER"), "Minimum": SimTypeBottom(label="LARGE_INTEGER"), "Maximum": SimTypeBottom(label="LARGE_INTEGER")}, name="RESUTIL_LARGEINT_DATA", pack=False, align=None), offset=0), "ULargeIntData": SimTypePointer(SimStruct({"Default": SimTypeBottom(label="ULARGE_INTEGER"), "Minimum": SimTypeBottom(label="ULARGE_INTEGER"), "Maximum": SimTypeBottom(label="ULARGE_INTEGER")}, name="RESUTIL_ULARGEINT_DATA", pack=False, align=None), offset=0), "FileTimeData": SimTypePointer(SimStruct({"Default": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Minimum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "Maximum": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None)}, name="RESUTIL_FILETIME_DATA", pack=False, align=None), offset=0)}, name="<anon>", label="None"), "Minimum": SimTypeInt(signed=False, label="UInt32"), "Maximum": SimTypeInt(signed=False, label="UInt32"), "Flags": SimTypeInt(signed=False, label="UInt32"), "Offset": SimTypeInt(signed=False, label="UInt32")}, name="RESUTIL_PROPERTY_ITEM", pack=False, align=None), offset=0), 
SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pPropertyTable", "pOutPropertyFormatList", "cbPropertyFormatListSize", "pcbBytesReturned", "pcbRequired"]),
#
'ResUtilGetCoreClusterResources': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HCLUSTER", pack=False, align=None), offset=0), SimTypePointer(SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), offset=0), SimTypePointer(SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), offset=0), SimTypePointer(SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hCluster", "phClusterNameResource", "phClusterIPAddressResource", "phClusterQuorumResource"]),
#
'ResUtilGetResourceName': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), SimTypePointer(SimTypeChar(label="Char"), label="LPArray", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hResource", "pszResourceName", "pcchResourceNameInOut"]),
#
'ResUtilGetClusterRoleState': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HCLUSTER", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="CLUSTER_ROLE")], SimTypeInt(signed=False, label="CLUSTER_ROLE_STATE"), arg_names=["hCluster", "eClusterRole"]),
#
'ClusterIsPathOnSharedVolume': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["lpszPathName"]),
#
'ClusterGetVolumePathName': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["lpszFileName", "lpszVolumePathName", "cchBufferLength"]),
#
'ClusterGetVolumeNameForVolumeMountPoint': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["lpszVolumeMountPoint", "lpszVolumeName", "cchBufferLength"]),
#
'ClusterPrepareSharedVolumeForBackup': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpszFileName", "lpszVolumePathName", "lpcchVolumePathName", "lpszVolumeName", "lpcchVolumeName"]),
#
'ClusterClearBackupStateForSharedVolume': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpszVolumePathName"]),
#
'ResUtilSetResourceServiceStartParametersEx': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="LOG_LEVEL"), SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypeBottom(label="Void"), arg_names=["ResourceHandle", "LogLevel", "FormatString"]), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pszServiceName", "schSCMHandle", "phService", "dwDesiredAccess", "pfnLogEvent", "hResourceHandle"]),
#
'ResUtilEnumResourcesEx2': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HCLUSTER", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeFunction([SimTypePointer(SimStruct({}, name="_HCLUSTER", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["param0", "param1", "param2", "param3"]), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hCluster", "hSelf", "lpszResTypeName", "pResCallBack", "pParameter", "dwDesiredAccess"]),
#
'ResUtilGetResourceDependencyEx': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), arg_names=["hSelf", "lpszResourceType", "dwDesiredAccess"]),
#
'ResUtilGetResourceDependencyByNameEx': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HCLUSTER", pack=False, align=None), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=True, label="Int32"), SimTypeInt(signed=False, label="UInt32")], SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), arg_names=["hCluster", "hSelf", "lpszResourceType", "bRecurse", "dwDesiredAccess"]),
#
'ResUtilGetResourceDependencyByClassEx': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HCLUSTER", pack=False, align=None), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"Anonymous": SimUnion({"Anonymous": SimStruct({"Anonymous": SimUnion({"dw": SimTypeInt(signed=False, label="UInt32"), "rc": SimTypeInt(signed=False, label="CLUSTER_RESOURCE_CLASS")}, name="<anon>", label="None"), "SubClass": SimTypeInt(signed=False, label="UInt32")}, name="_Anonymous_e__Struct", pack=False, align=None), "li": SimTypeBottom(label="ULARGE_INTEGER")}, name="<anon>", label="None")}, name="CLUS_RESOURCE_CLASS_INFO", pack=False, align=None), offset=0), SimTypeInt(signed=True, label="Int32"), SimTypeInt(signed=False, label="UInt32")], SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), arg_names=["hCluster", "hSelf", "prci", "bRecurse", "dwDesiredAccess"]),
#
'ResUtilGetResourceNameDependencyEx': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), arg_names=["lpszResourceName", "lpszResourceType", "dwDesiredAccess"]),
#
'ResUtilGetCoreClusterResourcesEx': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HCLUSTER", pack=False, align=None), offset=0), SimTypePointer(SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), offset=0), SimTypePointer(SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hClusterIn", "phClusterNameResourceOut", "phClusterQuorumResourceOut", "dwDesiredAccess"]),
#
'OpenClusterCryptProvider': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="SByte"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32")], SimTypePointer(SimStruct({}, name="_HCLUSCRYPTPROVIDER", pack=False, align=None), offset=0), arg_names=["lpszResource", "lpszProvider", "dwType", "dwFlags"]),
#
'OpenClusterCryptProviderEx': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="SByte"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32")], SimTypePointer(SimStruct({}, name="_HCLUSCRYPTPROVIDER", pack=False, align=None), offset=0), arg_names=["lpszResource", "lpszKeyname", "lpszProvider", "dwType", "dwFlags"]),
#
'CloseClusterCryptProvider': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HCLUSCRYPTPROVIDER", pack=False, align=None), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hClusCryptProvider"]),
#
'ClusterEncrypt': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HCLUSCRYPTPROVIDER", pack=False, align=None), offset=0), SimTypePointer(SimTypeChar(label="Byte"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypePointer(SimTypeChar(label="Byte"), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hClusCryptProvider", "pData", "cbData", "ppData", "pcbData"]),
#
'ClusterDecrypt': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HCLUSCRYPTPROVIDER", pack=False, align=None), offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypePointer(SimTypeChar(label="Byte"), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hClusCryptProvider", "pCryptInput", "cbCryptInput", "ppCryptOutput", "pcbCryptOutput"]),
#
'FreeClusterCrypt': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pCryptInfo"]),
#
'ResUtilPaxosComparer': SimTypeFunction([SimTypePointer(SimStruct({"__padding__PaxosTagVtable": SimTypeLongLong(signed=False, label="UInt64"), "__padding__NextEpochVtable": SimTypeLongLong(signed=False, label="UInt64"), "__padding__NextEpoch_DateTimeVtable": SimTypeLongLong(signed=False, label="UInt64"), "NextEpoch_DateTime_ticks": SimTypeLongLong(signed=False, label="UInt64"), "NextEpoch_Value": SimTypeInt(signed=True, label="Int32"), "__padding__BoundryNextEpoch": SimTypeInt(signed=False, label="UInt32"), "__padding__EpochVtable": SimTypeLongLong(signed=False, label="UInt64"), "__padding__Epoch_DateTimeVtable": SimTypeLongLong(signed=False, label="UInt64"), "Epoch_DateTime_ticks": SimTypeLongLong(signed=False, label="UInt64"), "Epoch_Value": SimTypeInt(signed=True, label="Int32"), "__padding__BoundryEpoch": SimTypeInt(signed=False, label="UInt32"), "Sequence": SimTypeInt(signed=True, label="Int32"), "__padding__BoundrySequence": SimTypeInt(signed=False, label="UInt32")}, name="PaxosTagCStruct", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"__padding__PaxosTagVtable": SimTypeLongLong(signed=False, label="UInt64"), "__padding__NextEpochVtable": SimTypeLongLong(signed=False, label="UInt64"), "__padding__NextEpoch_DateTimeVtable": SimTypeLongLong(signed=False, label="UInt64"), "NextEpoch_DateTime_ticks": SimTypeLongLong(signed=False, label="UInt64"), "NextEpoch_Value": SimTypeInt(signed=True, label="Int32"), "__padding__BoundryNextEpoch": SimTypeInt(signed=False, label="UInt32"), "__padding__EpochVtable": SimTypeLongLong(signed=False, label="UInt64"), "__padding__Epoch_DateTimeVtable": SimTypeLongLong(signed=False, label="UInt64"), "Epoch_DateTime_ticks": SimTypeLongLong(signed=False, label="UInt64"), "Epoch_Value": SimTypeInt(signed=True, label="Int32"), "__padding__BoundryEpoch": SimTypeInt(signed=False, label="UInt32"), "Sequence": SimTypeInt(signed=True, label="Int32"), "__padding__BoundrySequence": SimTypeInt(signed=False, label="UInt32")}, 
name="PaxosTagCStruct", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["left", "right"]),
#
'ResUtilLeftPaxosIsLessThanRight': SimTypeFunction([SimTypePointer(SimStruct({"__padding__PaxosTagVtable": SimTypeLongLong(signed=False, label="UInt64"), "__padding__NextEpochVtable": SimTypeLongLong(signed=False, label="UInt64"), "__padding__NextEpoch_DateTimeVtable": SimTypeLongLong(signed=False, label="UInt64"), "NextEpoch_DateTime_ticks": SimTypeLongLong(signed=False, label="UInt64"), "NextEpoch_Value": SimTypeInt(signed=True, label="Int32"), "__padding__BoundryNextEpoch": SimTypeInt(signed=False, label="UInt32"), "__padding__EpochVtable": SimTypeLongLong(signed=False, label="UInt64"), "__padding__Epoch_DateTimeVtable": SimTypeLongLong(signed=False, label="UInt64"), "Epoch_DateTime_ticks": SimTypeLongLong(signed=False, label="UInt64"), "Epoch_Value": SimTypeInt(signed=True, label="Int32"), "__padding__BoundryEpoch": SimTypeInt(signed=False, label="UInt32"), "Sequence": SimTypeInt(signed=True, label="Int32"), "__padding__BoundrySequence": SimTypeInt(signed=False, label="UInt32")}, name="PaxosTagCStruct", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"__padding__PaxosTagVtable": SimTypeLongLong(signed=False, label="UInt64"), "__padding__NextEpochVtable": SimTypeLongLong(signed=False, label="UInt64"), "__padding__NextEpoch_DateTimeVtable": SimTypeLongLong(signed=False, label="UInt64"), "NextEpoch_DateTime_ticks": SimTypeLongLong(signed=False, label="UInt64"), "NextEpoch_Value": SimTypeInt(signed=True, label="Int32"), "__padding__BoundryNextEpoch": SimTypeInt(signed=False, label="UInt32"), "__padding__EpochVtable": SimTypeLongLong(signed=False, label="UInt64"), "__padding__Epoch_DateTimeVtable": SimTypeLongLong(signed=False, label="UInt64"), "Epoch_DateTime_ticks": SimTypeLongLong(signed=False, label="UInt64"), "Epoch_Value": SimTypeInt(signed=True, label="Int32"), "__padding__BoundryEpoch": SimTypeInt(signed=False, label="UInt32"), "Sequence": SimTypeInt(signed=True, label="Int32"), "__padding__BoundrySequence": SimTypeInt(signed=False, 
label="UInt32")}, name="PaxosTagCStruct", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["left", "right"]),
#
'ResUtilsDeleteKeyTree': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=True, label="Int32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["key", "keyName", "treatNoKeyAsError"]),
#
'ResUtilGroupsEqual': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HGROUP", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="_HGROUP", pack=False, align=None), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hSelf", "hGroup", "pEqual"]),
#
'ResUtilEnumGroups': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HCLUSTER", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="_HGROUP", pack=False, align=None), offset=0), SimTypePointer(SimTypeFunction([SimTypePointer(SimStruct({}, name="_HCLUSTER", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="_HGROUP", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="_HGROUP", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["param0", "param1", "param2", "param3"]), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hCluster", "hSelf", "pResCallBack", "pParameter"]),
#
'ResUtilEnumGroupsEx': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HCLUSTER", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="_HGROUP", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="CLUSGROUP_TYPE"), SimTypePointer(SimTypeFunction([SimTypePointer(SimStruct({}, name="_HCLUSTER", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="_HGROUP", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="_HGROUP", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["param0", "param1", "param2", "param3"]), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hCluster", "hSelf", "groupType", "pResCallBack", "pParameter"]),
#
'ResUtilDupGroup': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HGROUP", pack=False, align=None), offset=0), SimTypePointer(SimTypePointer(SimStruct({}, name="_HGROUP", pack=False, align=None), offset=0), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["group", "copy"]),
#
'ResUtilGetClusterGroupType': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HGROUP", pack=False, align=None), offset=0), SimTypePointer(SimTypeInt(signed=False, label="CLUSGROUP_TYPE"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hGroup", "groupType"]),
#
'ResUtilGetCoreGroup': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HCLUSTER", pack=False, align=None), offset=0)], SimTypePointer(SimStruct({}, name="_HGROUP", pack=False, align=None), offset=0), arg_names=["hCluster"]),
#
'ResUtilResourceDepEnum': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeFunction([SimTypePointer(SimStruct({}, name="_HCLUSTER", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["param0", "param1", "param2", "param3"]), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hSelf", "enumType", "pResCallBack", "pParameter"]),
#
'ResUtilDupResource': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), SimTypePointer(SimTypePointer(SimStruct({}, name="_HRESOURCE", pack=False, align=None), offset=0), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["group", "copy"]),
#
'ResUtilGetClusterId': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HCLUSTER", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="Guid"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hCluster", "guid"]),
#
'ResUtilNodeEnum': SimTypeFunction([SimTypePointer(SimStruct({}, name="_HCLUSTER", pack=False, align=None), offset=0), SimTypePointer(SimTypeFunction([SimTypePointer(SimStruct({}, name="_HCLUSTER", pack=False, align=None), offset=0), SimTypePointer(SimStruct({}, name="_HNODE", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="CLUSTER_NODE_STATE"), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["param0", "param1", "param2", "param3"]), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hCluster", "pNodeCallBack", "pParameter"]),
}
lib.set_prototypes(prototypes)
| 380.746154
| 2,558
| 0.74555
| 10,301
| 98,994
| 7.091253
| 0.043394
| 0.13427
| 0.121347
| 0.181171
| 0.907799
| 0.90669
| 0.902173
| 0.900105
| 0.898025
| 0.894246
| 0
| 0.019587
| 0.068065
| 98,994
| 259
| 2,559
| 382.216216
| 0.772199
| 0.000283
| 0
| 0
| 0
| 0
| 0.264409
| 0.059154
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.037594
| 0
| 0.037594
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
a5aac05b41ab68e6724031894736e38f0ed75c40
| 9,558
|
py
|
Python
|
pbj/electrostatics/pb_formulation/formulations/common.py
|
bem4solvation/pbj
|
4fa9c111596359192539787ae241a79d4316b15b
|
[
"MIT"
] | null | null | null |
pbj/electrostatics/pb_formulation/formulations/common.py
|
bem4solvation/pbj
|
4fa9c111596359192539787ae241a79d4316b15b
|
[
"MIT"
] | 1
|
2022-02-18T17:34:37.000Z
|
2022-02-18T17:34:37.000Z
|
pbj/electrostatics/pb_formulation/formulations/common.py
|
bem4solvation/pbj
|
4fa9c111596359192539787ae241a79d4316b15b
|
[
"MIT"
] | null | null | null |
import bempp.api
import numpy as np
import time
import pbj.electrostatics.utils as utils
def calculate_potential_one_surface(self, rerun_all):
    """Assemble (if needed) and solve the BEM system for a single-surface
    formulation, storing the surface potential in ``self.results``.

    Parameters
    ----------
    rerun_all : bool
        When True, rebuild matrices, RHS and preconditioner from scratch;
        otherwise reuse whatever cached pieces already exist.
    """
    # Start the overall timing for the whole process
    start_time = time.time()

    if rerun_all:
        self.initialise_matrices()
        self.assemble_matrices()
        self.initialise_rhs()
        self.apply_preconditioning()
    else:
        if "A" not in self.matrices or "rhs_1" not in self.rhs:
            # If matrix A or rhs_1 doesn't exist, it must first be created
            self.initialise_matrices()
            self.initialise_rhs()
        if not self.matrices["A"]._cached:
            self.assemble_matrices()
        if "A_discrete" not in self.matrices or "rhs_discrete" not in self.rhs:
            # See if preconditioning needs to be applied if this hasn't been done
            self.apply_preconditioning()

    # Use GMRES to solve the system of equations.  The preconditioned and
    # unpreconditioned paths differ only in the optional ``precond``
    # argument, so the solver is called once with kwargs built conditionally.
    gmres_start_time = time.time()
    solver_kwargs = {}
    # NOTE: the explicit ``== True`` comparison is kept on purpose so only a
    # boolean True enables preconditioning, exactly as before.
    if (
        "preconditioning_matrix_gmres" in self.matrices
        and self.pb_formulation_preconditioning == True
    ):
        solver_kwargs["precond"] = self.matrices["preconditioning_matrix_gmres"]
    x, info, it_count = utils.solver(
        self.matrices["A_discrete"],
        self.rhs["rhs_discrete"],
        self.gmres_tolerance,
        self.gmres_restart,
        self.gmres_max_iterations,
        **solver_kwargs,
    )
    self.timings["time_gmres"] = time.time() - gmres_start_time

    # Split solution and generate corresponding grid functions
    from bempp.api.assembly.blocked_operator import (
        grid_function_list_from_coefficients,
    )

    dirichlet_solution, neumann_solution = grid_function_list_from_coefficients(
        x.ravel(), self.matrices["A"].domain_spaces
    )

    # Save number of iterations taken and the solution of the system
    self.results["solver_iteration_count"] = it_count
    self.results["phi"] = dirichlet_solution
    if self.formulation_object.invert_potential:
        # Rescale the normal derivative by the permittivity ratio
        self.results["d_phi"] = (self.ep_ex / self.ep_in) * neumann_solution
    else:
        self.results["d_phi"] = neumann_solution

    # Finished computing surface potential, register total time taken
    self.timings["time_compute_potential"] = time.time() - start_time
    # Print times, if this is desired
    if self.print_times:
        show_potential_calculation_times(self)
def calculate_potential_stern(self, rerun_all):
    """Assemble (if needed) and solve the BEM system for a formulation with
    a Stern layer, storing dielectric- and Stern-surface potentials in
    ``self.results``.

    Parameters
    ----------
    rerun_all : bool
        When True, rebuild matrices, RHS and preconditioner from scratch;
        otherwise reuse whatever cached pieces already exist.
    """
    # Start the overall timing for the whole process
    start_time = time.time()

    if rerun_all:
        self.initialise_matrices()
        self.assemble_matrices()
        self.initialise_rhs()
        self.apply_preconditioning()
    else:
        if "A" not in self.matrices or "rhs_1" not in self.rhs:
            # If matrix A or rhs_1 doesn't exist, it must first be created
            self.initialise_matrices()
            self.initialise_rhs()
        if not self.matrices["A"]._cached:
            self.assemble_matrices()
        if "A_discrete" not in self.matrices or "rhs_discrete" not in self.rhs:
            # See if preconditioning needs to be applied if this hasn't been done
            self.apply_preconditioning()

    # Use GMRES to solve the system of equations.  Both branches previously
    # repeated the full solver call; only ``precond`` differs, so build it
    # conditionally and call once.
    gmres_start_time = time.time()
    solver_kwargs = {}
    # NOTE: the explicit ``== True`` comparison is kept on purpose so only a
    # boolean True enables preconditioning, exactly as before.
    if (
        "preconditioning_matrix_gmres" in self.matrices
        and self.pb_formulation_preconditioning == True
    ):
        solver_kwargs["precond"] = self.matrices["preconditioning_matrix_gmres"]
    x, info, it_count = utils.solver(
        self.matrices["A_discrete"],
        self.rhs["rhs_discrete"],
        self.gmres_tolerance,
        self.gmres_restart,
        self.gmres_max_iterations,
        **solver_kwargs,
    )
    self.timings["time_gmres"] = time.time() - gmres_start_time

    # Split solution and generate corresponding grid functions: the blocked
    # system yields (Dirichlet, Neumann) traces on the dielectric surface
    # followed by the same pair on the Stern surface.
    from bempp.api.assembly.blocked_operator import (
        grid_function_list_from_coefficients,
    )

    (
        dirichlet_diel_solution,
        neumann_diel_solution,
        dirichlet_stern_solution,
        neumann_stern_solution,
    ) = grid_function_list_from_coefficients(
        x.ravel(), self.matrices["A"].domain_spaces
    )

    # Save number of iterations taken and the solution of the system
    self.results["solver_iteration_count"] = it_count
    self.results["phi"] = dirichlet_diel_solution
    self.results["d_phi"] = neumann_diel_solution
    self.results["phi_stern"] = dirichlet_stern_solution
    self.results["d_phi_stern"] = neumann_stern_solution

    # Finished computing surface potential, register total time taken
    self.timings["time_compute_potential"] = time.time() - start_time
    # Print times, if this is desired
    if self.print_times:
        show_potential_calculation_times(self)
def calculate_potential_slic(self):
    """Run one SLIC iteration: reassemble the system, solve it with GMRES
    and store the dielectric- and Stern-surface potentials.

    Unlike the non-SLIC variants, the per-iteration timings and iteration
    counts are *appended* to lists (assumed initialised by the caller —
    TODO confirm) so the history over SLIC iterations is kept.
    """
    # Start the overall timing for one SLIC iteration
    start_time = time.time()
    self.initialise_matrices()
    self.assemble_matrices()
    self.apply_preconditioning()

    # Use GMRES to solve the system of equations.  The preconditioned and
    # unpreconditioned paths differ only in ``precond``, so the solver is
    # called once with conditionally-built kwargs.
    gmres_start_time = time.time()
    solver_kwargs = {}
    # NOTE: the explicit ``== True`` comparison is kept on purpose so only a
    # boolean True enables preconditioning, exactly as before.
    if (
        "preconditioning_matrix_gmres" in self.matrices
        and self.pb_formulation_preconditioning == True
    ):
        solver_kwargs["precond"] = self.matrices["preconditioning_matrix_gmres"]
    x, info, it_count = utils.solver(
        self.matrices["A_discrete"],
        self.rhs["rhs_discrete"],
        self.gmres_tolerance,
        self.gmres_restart,
        self.gmres_max_iterations,
        **solver_kwargs,
    )
    self.timings["time_gmres"].append(time.time() - gmres_start_time)

    # Split solution and generate corresponding grid functions
    from bempp.api.assembly.blocked_operator import (
        grid_function_list_from_coefficients,
    )

    (
        dirichlet_diel_solution,
        neumann_diel_solution,
        dirichlet_stern_solution,
        neumann_stern_solution,
    ) = grid_function_list_from_coefficients(
        x.ravel(), self.matrices["A"].domain_spaces
    )

    # Save number of iterations taken and the solution of the system
    self.results["solver_iteration_count"].append(it_count)
    self.results["phi"] = dirichlet_diel_solution
    self.results["d_phi"] = neumann_diel_solution
    self.results["phi_stern"] = dirichlet_stern_solution
    self.results["d_phi_stern"] = neumann_stern_solution

    # Finished computing surface potential, register total time taken
    self.timings["time_compute_potential"].append(time.time() - start_time)
    # Print times, if this is desired
    if self.print_times:
        show_potential_calculation_times(self)
def show_potential_calculation_times(self):
    """Print a breakdown of the timings recorded while computing the surface
    potential, or a notice if no potential has been computed yet.

    Reads ``self.timings``, ``self.discrete_form_type``,
    ``self.pb_formulation_preconditioning`` and
    ``self.pb_formulation_preconditioning_type``; produces console output
    only and returns None.
    """
    # Guard clause: without a computed potential there are no timings to show.
    if "phi" not in self.results:
        print("Potential must first be calculated to show times.")
        return
    print(
        "It took ",
        self.timings["time_matrix_construction"],
        " seconds to construct the matrices",
    )
    print(
        "It took ",
        self.timings["time_rhs_construction"],
        " seconds to construct the rhs vectors",
    )
    print(
        "It took ",
        self.timings["time_matrix_to_discrete"],
        " seconds to pass the main matrix to discrete form ("
        + self.discrete_form_type
        + ")",
    )
    # BUGFIX: the original message opened two parentheses but closed only
    # one, e.g. "(True(block_diag)"; now balanced and spaced:
    # "(True (block_diag))".
    print(
        "It took ",
        self.timings["time_preconditioning"],
        " seconds to compute and apply the preconditioning ("
        + str(self.pb_formulation_preconditioning)
        + " ("
        + self.pb_formulation_preconditioning_type
        + "))",
    )
    print(
        "It took ",
        self.timings["time_gmres"],
        " seconds to resolve the system using GMRES",
    )
    print(
        "It took ",
        self.timings["time_compute_potential"],
        " seconds in total to compute the surface potential",
    )
| 39.659751
| 146
| 0.607135
| 1,072
| 9,558
| 5.165112
| 0.126866
| 0.049846
| 0.019505
| 0.024562
| 0.880621
| 0.851544
| 0.842153
| 0.81145
| 0.81145
| 0.81145
| 0
| 0.000609
| 0.313245
| 9,558
| 241
| 147
| 39.659751
| 0.842931
| 0.15882
| 0
| 0.709497
| 0
| 0
| 0.134865
| 0.048701
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022346
| false
| 0.005587
| 0.039106
| 0
| 0.061453
| 0.055866
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3c136ddfeeadcad881dd3a13b0e371196081a61c
| 149
|
py
|
Python
|
tests/parser/bug.29.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/bug.29.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/bug.29.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
# Parser round-trip fixture: the ASP/datalog program in ``input`` is expected
# to be re-emitted unchanged (``output`` is byte-identical to ``input``).
# NOTE(review): harness semantics inferred from the tests/parser location —
# confirm how the runner consumes these two variables.
# NOTE(review): ``input`` shadows the builtin; kept as-is since the test
# harness presumably looks it up by this exact name.
input = """
:- a, c.
a | b.
a | a.
fact1.
:- fact1, not c.
"""
# Expected parser output — identical to the input program.
output = """
:- a, c.
a | b.
a | a.
fact1.
:- fact1, not c.
"""
| 7.095238
| 17
| 0.342282
| 22
| 149
| 2.318182
| 0.318182
| 0.078431
| 0.117647
| 0.156863
| 0.784314
| 0.784314
| 0.784314
| 0.784314
| 0.784314
| 0.784314
| 0
| 0.043956
| 0.389262
| 149
| 20
| 18
| 7.45
| 0.516484
| 0
| 0
| 0.857143
| 0
| 0
| 0.766917
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3c89f17803d66a11488351c4fa70fb6cb9968914
| 47
|
py
|
Python
|
basicts/archs/Stat_arch/__init__.py
|
zezhishao/GuanCang_BasicTS
|
bbf82b9d08e82db78d4e9e9b11f43a676b54ad7c
|
[
"Apache-2.0"
] | 3
|
2022-02-22T12:50:08.000Z
|
2022-03-13T03:38:46.000Z
|
basicts/archs/Stat_arch/__init__.py
|
zezhishao/GuanCang_BasicTS
|
bbf82b9d08e82db78d4e9e9b11f43a676b54ad7c
|
[
"Apache-2.0"
] | null | null | null |
basicts/archs/Stat_arch/__init__.py
|
zezhishao/GuanCang_BasicTS
|
bbf82b9d08e82db78d4e9e9b11f43a676b54ad7c
|
[
"Apache-2.0"
] | null | null | null |
from basicts.archs.Stat_arch.Stat_arch import *
| 47
| 47
| 0.851064
| 8
| 47
| 4.75
| 0.75
| 0.421053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06383
| 47
| 1
| 47
| 47
| 0.863636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.