hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
0ed2ea837532ec258b30013349fd4135cb2a627d
155
py
Python
tests/__init__.py
cronitorio/django_auto_healthchecks
2edf0b28bd01ee2d4088a834de5c93df463c9cd5
[ "MIT" ]
4
2017-02-24T08:20:44.000Z
2019-11-04T15:11:10.000Z
tests/__init__.py
cronitorio/django_auto_healthchecks
2edf0b28bd01ee2d4088a834de5c93df463c9cd5
[ "MIT" ]
null
null
null
tests/__init__.py
cronitorio/django_auto_healthchecks
2edf0b28bd01ee2d4088a834de5c93df463c9cd5
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- class MockSettings(object): def __init__(self, **kwargs): [self.__setattr__(key, val) for key, val in kwargs.items()]
25.833333
67
0.625806
20
155
4.45
0.8
0.134831
0
0
0
0
0
0
0
0
0
0.008
0.193548
155
5
68
31
0.704
0.135484
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
0edc28d87f228f56d4f4bac9e0dae0cee31eb386
79
py
Python
test/test_longest_turbulent_subarray.py
spencercjh/sync-leetcode-today-problem-python3-example
4957e5eadb697334741df0fc297bec2edaa9e2ab
[ "Apache-2.0" ]
null
null
null
test/test_longest_turbulent_subarray.py
spencercjh/sync-leetcode-today-problem-python3-example
4957e5eadb697334741df0fc297bec2edaa9e2ab
[ "Apache-2.0" ]
null
null
null
test/test_longest_turbulent_subarray.py
spencercjh/sync-leetcode-today-problem-python3-example
4957e5eadb697334741df0fc297bec2edaa9e2ab
[ "Apache-2.0" ]
null
null
null
solution = LongestTurbulentSubarray() assert X == solution.maxTurbulenceSize( )
39.5
41
0.810127
6
79
10.666667
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.088608
79
2
41
39.5
0.888889
0
0
0
0
0
0
0
0
0
0
0
0.5
1
0
false
0
0
0
0
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
4
0ee3bbcae049caf7f2b635769e423db21d674b59
280
py
Python
scout/notifications/admin.py
theteam/django-scout
75d6b708d4c65cca05e8f1678df40543f6e81b0d
[ "BSD-3-Clause" ]
null
null
null
scout/notifications/admin.py
theteam/django-scout
75d6b708d4c65cca05e8f1678df40543f6e81b0d
[ "BSD-3-Clause" ]
null
null
null
scout/notifications/admin.py
theteam/django-scout
75d6b708d4c65cca05e8f1678df40543f6e81b0d
[ "BSD-3-Clause" ]
1
2022-03-20T04:22:41.000Z
2022-03-20T04:22:41.000Z
from django.contrib import admin from scout.notifications.models import NotificationProfile class NotificationProfileAdmin(admin.ModelAdmin): list_display = ['user', 'notification_email', 'mobile_number'] admin.site.register(NotificationProfile, NotificationProfileAdmin)
28
66
0.828571
27
280
8.481481
0.777778
0
0
0
0
0
0
0
0
0
0
0
0.089286
280
9
67
31.111111
0.898039
0
0
0
0
0
0.125
0
0
0
0
0
0
1
0
false
0
0.4
0
0.8
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
160beb9ee55bc05f2ae812b52a21056f29b019ff
87
py
Python
option/apps.py
puchchi/WebFramework
8353ffd46827917303a729c64b9f79a51fe8a034
[ "MIT" ]
1
2019-04-01T17:04:54.000Z
2019-04-01T17:04:54.000Z
option/apps.py
puchchi/WebFramework
8353ffd46827917303a729c64b9f79a51fe8a034
[ "MIT" ]
null
null
null
option/apps.py
puchchi/WebFramework
8353ffd46827917303a729c64b9f79a51fe8a034
[ "MIT" ]
null
null
null
from django.apps import AppConfig class optionConfig(AppConfig): name = 'option'
14.5
33
0.747126
10
87
6.5
0.9
0
0
0
0
0
0
0
0
0
0
0
0.172414
87
5
34
17.4
0.902778
0
0
0
0
0
0.068966
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
1611fcc9dcb829f41b13f834cdb7a36fa40d577b
126
py
Python
flavio/physics/scattering/__init__.py
Felicia56/flavio
ea735bd8febbb961d249eddf338a4960c1fbee69
[ "MIT" ]
61
2016-03-09T16:19:39.000Z
2022-03-30T00:55:51.000Z
flavio/physics/scattering/__init__.py
Felicia56/flavio
ea735bd8febbb961d249eddf338a4960c1fbee69
[ "MIT" ]
167
2016-03-15T15:25:57.000Z
2022-02-27T22:19:22.000Z
flavio/physics/scattering/__init__.py
Felicia56/flavio
ea735bd8febbb961d249eddf338a4960c1fbee69
[ "MIT" ]
57
2016-03-15T14:24:23.000Z
2022-01-14T01:00:03.000Z
r"""Module for observables in scattering of electron-positron or proton-(anti)proton to SM particles.""" from . import ee_ww
25.2
84
0.769841
20
126
4.8
0.95
0
0
0
0
0
0
0
0
0
0
0
0.134921
126
4
85
31.5
0.880734
0.769841
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
16600fa235840d4720fdd8f05799d3581a7f6221
142
py
Python
tudatpy/cli/templates/test.py
gitter-badger/tudatpy
f5faef4ecfe8667cb9c989dd839185eeed5f9662
[ "BSD-3-Clause" ]
null
null
null
tudatpy/cli/templates/test.py
gitter-badger/tudatpy
f5faef4ecfe8667cb9c989dd839185eeed5f9662
[ "BSD-3-Clause" ]
null
null
null
tudatpy/cli/templates/test.py
gitter-badger/tudatpy
f5faef4ecfe8667cb9c989dd839185eeed5f9662
[ "BSD-3-Clause" ]
null
null
null
import jinja2 template_loader = jinja2.FileSystemLoader(searchpath=template_path) template_env = jinja2.Environment(loader=template_loader)
23.666667
67
0.859155
16
142
7.375
0.5625
0.237288
0
0
0
0
0
0
0
0
0
0.022727
0.070423
142
5
68
28.4
0.871212
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
16b617183a310adb70ee065fd77e52ff76bab5e1
305
py
Python
onto/mapper/helpers.py
billyrrr/onto
72733d36a2583ae4758f7cf33a5229b79773702b
[ "MIT" ]
1
2020-10-04T10:01:45.000Z
2020-10-04T10:01:45.000Z
onto/mapper/helpers.py
billyrrr/onto
72733d36a2583ae4758f7cf33a5229b79773702b
[ "MIT" ]
null
null
null
onto/mapper/helpers.py
billyrrr/onto
72733d36a2583ae4758f7cf33a5229b79773702b
[ "MIT" ]
null
null
null
from collections import namedtuple RelationshipReference = namedtuple( "RelationshipReference", ['doc_ref', 'nested', 'obj', 'obj_type'], defaults=(None, None, None, None) ) EmbeddedElement = namedtuple( "EmbeddedElement", ['d', 'obj_cls', 'obj'], defaults=(None, None, None) )
20.333333
45
0.662295
29
305
6.862069
0.517241
0.201005
0.180905
0.201005
0
0
0
0
0
0
0
0
0.180328
305
14
46
21.785714
0.796
0
0
0
0
0
0.232787
0.068852
0
0
0
0
0
1
0
false
0
0.090909
0
0.090909
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
16c30ecb3823226becb18f821c6873c917332628
129
py
Python
pysomatic/routes/predictions.py
MedleyLabs/DeepAutonomic
071aabafcc6e5e4c61c7d8e1153c877c52226d31
[ "MIT" ]
null
null
null
pysomatic/routes/predictions.py
MedleyLabs/DeepAutonomic
071aabafcc6e5e4c61c7d8e1153c877c52226d31
[ "MIT" ]
null
null
null
pysomatic/routes/predictions.py
MedleyLabs/DeepAutonomic
071aabafcc6e5e4c61c7d8e1153c877c52226d31
[ "MIT" ]
null
null
null
from flask import Blueprint, request, url_for, redirect, flash, make_response predictions = Blueprint('predictions', __name__)
25.8
77
0.79845
15
129
6.466667
0.866667
0
0
0
0
0
0
0
0
0
0
0
0.116279
129
4
78
32.25
0.850877
0
0
0
0
0
0.085271
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
1
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
1
0
4
bc41dcd770a141766a15cc3034ae7ad9195b1bf9
26
py
Python
calculate.py
liuzhiyang-liu/lesson2
c50888764bf1c55df77b66fb9bdb156f955fd760
[ "Apache-2.0" ]
null
null
null
calculate.py
liuzhiyang-liu/lesson2
c50888764bf1c55df77b66fb9bdb156f955fd760
[ "Apache-2.0" ]
null
null
null
calculate.py
liuzhiyang-liu/lesson2
c50888764bf1c55df77b66fb9bdb156f955fd760
[ "Apache-2.0" ]
null
null
null
a=2 b=7 c=a // b print(c)
5.2
8
0.5
9
26
1.444444
0.666667
0
0
0
0
0
0
0
0
0
0
0.1
0.230769
26
4
9
6.5
0.55
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.25
1
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
bc55227dda4595d3ed62dbf1445aa79d820c58f2
1,393
py
Python
tests/test_spec.py
sfischer13/python-ssv
b9f8bf7bc6fc259a7b57781673959a42d8631b5b
[ "MIT" ]
3
2016-05-28T10:17:48.000Z
2020-09-25T17:25:24.000Z
tests/test_spec.py
sfischer13/python-ssv
b9f8bf7bc6fc259a7b57781673959a42d8631b5b
[ "MIT" ]
null
null
null
tests/test_spec.py
sfischer13/python-ssv
b9f8bf7bc6fc259a7b57781673959a42d8631b5b
[ "MIT" ]
null
null
null
#!/usr/bin/env python """ test_spec --------- Tests for the the SSV specification. """ import tempfile import unittest import ssv from ssv.constant import RS from ssv.constant import US class TestSpec(unittest.TestCase): def setUp(self): pass def tearDown(self): pass def check_in_out(self, table, string): string = string.replace("R", RS) string = string.replace("U", US) self.assertEqual(ssv.dumps(table), string) self.assertEqual(table, ssv.loads(string)) def test_spec_000(self): self.check_in_out([[""]], "") def test_spec_001(self): self.check_in_out([[""], [""]], "R") def test_spec_002(self): self.check_in_out([["1"], [""]], "1R") def test_spec_003(self): self.check_in_out([[""], ["1"]], "R1") def test_spec_004(self): self.check_in_out([["1"], ["2"]], "1R2") def test_spec_005(self): self.check_in_out([[""], ["", ""]], "RU") def test_spec_006(self): self.check_in_out([[""], ["1", ""]], "R1U") def test_spec_007(self): self.check_in_out([[""], ["", "1"]], "RU1") def test_spec_008(self): self.check_in_out([[""], ["1", "2"]], "R1U2") def test_spec_009(self): self.check_in_out([["1"], ["2", "3"]], "1R2U3") if __name__ == '__main__': import sys sys.exit(unittest.main())
20.791045
55
0.563532
188
1,393
3.904255
0.335106
0.119891
0.149864
0.20436
0.258856
0.185286
0.081744
0
0
0
0
0.048507
0.230438
1,393
66
56
21.106061
0.636194
0.055994
0
0.052632
0
0
0.035195
0
0
0
0
0
0.052632
1
0.342105
false
0.052632
0.157895
0
0.526316
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
4
bc673fa26474caf7f7b2dd29d67f2146f8e8a3c7
134
py
Python
django_app/seedstars/apps.py
yudori/seedstars_challenge
2fbc0de5a704f22ad79eb8894b77e4b614788906
[ "MIT" ]
null
null
null
django_app/seedstars/apps.py
yudori/seedstars_challenge
2fbc0de5a704f22ad79eb8894b77e4b614788906
[ "MIT" ]
3
2020-02-11T23:16:16.000Z
2021-06-10T18:32:14.000Z
django_app/seedstars/apps.py
yudori/seedstars_challenge
2fbc0de5a704f22ad79eb8894b77e4b614788906
[ "MIT" ]
null
null
null
from __future__ import unicode_literals from django.apps import AppConfig class SeedstarsConfig(AppConfig): name = 'seedstars'
16.75
39
0.798507
15
134
6.8
0.8
0
0
0
0
0
0
0
0
0
0
0
0.149254
134
7
40
19.142857
0.894737
0
0
0
0
0
0.067164
0
0
0
0
0
0
1
0
false
0
0.5
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
bc8083e8c81c2566a2714edd7862cb6506a0b47b
119,116
py
Python
kive/metadata/tests.py
dmacmillan/Kive
76bc8f289f66fb133f78cb6d5689568b7d015915
[ "BSD-3-Clause" ]
1
2021-12-22T06:10:01.000Z
2021-12-22T06:10:01.000Z
kive/metadata/tests.py
dmacmillan/Kive
76bc8f289f66fb133f78cb6d5689568b7d015915
[ "BSD-3-Clause" ]
null
null
null
kive/metadata/tests.py
dmacmillan/Kive
76bc8f289f66fb133f78cb6d5689568b7d015915
[ "BSD-3-Clause" ]
null
null
null
""" Unit tests for Shipyard metadata models. """ import os import re from django.core.exceptions import ValidationError from django.test import TestCase, skipIfDBFeature from django.core.urlresolvers import reverse, resolve from django.contrib.auth.models import User, Group from rest_framework import status from rest_framework.test import APIRequestFactory, force_authenticate from metadata.models import BasicConstraint, CompoundDatatype, Datatype, kive_user, everyone_group from librarian.models import Dataset from metadata.serializers import CompoundDatatypeSerializer from constants import CDTs, datatypes, groups import kive.testing_utils as tools from kive.tests import DuckContext samplecode_path = "../samplecode" @skipIfDBFeature('is_mocked') class MetadataTestCase(TestCase): """ Set up a database state for unit testing. Other test classes that require this state can extend this one. """ def setUp(self): tools.create_metadata_test_environment(self) def tearDown(self): tools.clean_up_all_files() class DatatypeTests(MetadataTestCase): def setUp(self): """Add some DTs used to check circular restrictions.""" MetadataTestCase.setUp(self) # Datatypes used to test circular restrictions. self.dt_1 = Datatype( name="dt_1", description="A string (1)", user=self.myUser) self.dt_1.save() self.dt_1.restricts.add(self.string_dt) self.dt_2 = Datatype( name="dt_2", description="A string (2)", user=self.myUser) self.dt_2.save() self.dt_2.restricts.add(self.string_dt) self.dt_3 = Datatype( name="dt_3", description="A string (3)", user=self.myUser) self.dt_3.save() self.dt_3.restricts.add(self.string_dt) self.dt_4 = Datatype( name="dt_4", description="A string (4)", user=self.myUser) self.dt_4.save() self.dt_4.restricts.add(self.string_dt) self.dt_5 = Datatype( name="dt_5", description="A string (5)", user=self.myUser) self.dt_5.save() self.dt_5.restricts.add(self.string_dt) def test_datatype_unicode(self): """ Unicode representation must be the instance's name. 
""" my_datatype = Datatype(name="fhqwhgads", user=self.myUser) self.assertEqual(unicode(my_datatype), "fhqwhgads") # ### Unit tests for datatype.clean (Circular restrictions) ### # Direct circular cases: start, middle, end # Start dt1 restricts dt1, dt3, dt4 # Middle dt1 restricts dt3, dt1, dt4 # End dt1 restricts dt3, dt4, dt1 # Good dt1 restricts dt2, dt3, dt4 def test_datatype_circular_direct_start_clean_bad(self): """ Circular, direct, start dt1 restricts dt1, dt3, dt4 """ self.dt_1.restricts.add(self.dt_1) self.dt_1.restricts.add(self.dt_3) self.dt_1.restricts.add(self.dt_4) self.dt_1.save() self.assertRaisesRegexp(ValidationError, re.escape('Datatype "{}" has a circular restriction'.format(self.dt_1)), self.dt_1.clean) def test_datatype_circular_direct_middle_clean_bad(self): """ Circular, direct, middle dt1 restricts dt3, dt1, dt4 """ self.dt_1.restricts.add(self.dt_3) self.dt_1.restricts.add(self.dt_1) self.dt_1.restricts.add(self.dt_4) self.dt_1.save() self.assertRaisesRegexp(ValidationError, re.escape('Datatype "{}" has a circular restriction'.format(self.dt_1)), self.dt_1.clean) def test_datatype_circular_direct_end_clean_bad(self): """ Circular, direct, middle dt1 restricts dt3, dt4, dt1 """ self.dt_1.restricts.add(self.dt_3) self.dt_1.restricts.add(self.dt_4) self.dt_1.restricts.add(self.dt_1) self.dt_1.save() self.assertRaisesRegexp(ValidationError, re.escape('Datatype "{}" has a circular restriction'.format(self.dt_1)), self.dt_1.clean) def test_datatype_circular_direct_clean_good(self): """ dt1 restricts dt2, dt3, dt4 """ self.dt_1.restricts.add(self.dt_2) self.dt_1.restricts.add(self.dt_3) self.dt_1.restricts.add(self.dt_4) self.dt_1.save() self.assertEqual(self.dt_1.clean(), None) def test_datatype_circular_recursive_begin_clean_bad(self): """ dt1 restricts dt2, dt3, dt4 dt2 restricts dt1 """ self.dt_1.restricts.add(self.dt_2) self.dt_1.restricts.add(self.dt_3) self.dt_1.restricts.add(self.dt_4) self.dt_1.save() self.dt_2.restricts.add(self.dt_1) 
self.dt_2.save() self.assertRaisesRegexp(ValidationError, re.escape('Datatype "{}" has a circular restriction'.format(self.dt_1)), self.dt_1.clean) def test_datatype_circular_recursive_middle_clean_bad(self): """ dt1 restricts dt2, dt3, dt4 dt3 restricts dt1 """ self.dt_1.restricts.add(self.dt_2) self.dt_1.restricts.add(self.dt_3) self.dt_1.restricts.add(self.dt_4) self.dt_1.save() self.dt_3.restricts.add(self.dt_1) self.dt_3.save() self.assertRaisesRegexp(ValidationError, re.escape('Datatype "{}" has a circular restriction'.format(self.dt_1)), self.dt_1.clean) def test_datatype_circular_recursive_end_clean_bad(self): """ dt1 restricts dt2, dt3, dt4 dt4 restricts dt1 """ self.dt_1.restricts.add(self.dt_2) self.dt_1.restricts.add(self.dt_3) self.dt_1.restricts.add(self.dt_4) self.dt_1.save() self.dt_4.restricts.add(self.dt_1) self.dt_4.save() self.assertRaisesRegexp(ValidationError, re.escape('Datatype "{}" has a circular restriction'.format(self.dt_1)), self.dt_1.clean) def test_datatype_circular_recursive_clean_good1(self): """ dt1 restricts dt2, dt3, dt4 dt2 restricts dt5 """ self.dt_1.restricts.add(self.dt_2) self.dt_1.restricts.add(self.dt_3) self.dt_1.restricts.add(self.dt_4) self.dt_1.save() self.dt_2.restricts.add(self.dt_5) self.dt_2.save() self.assertEqual(self.dt_1.clean(), None) def test_datatype_circular_recursive_clean_good2(self): """ dt1 restricts dt2, dt3, dt4 dt3 restricts dt5 """ self.dt_1.restricts.add(self.dt_2) self.dt_1.restricts.add(self.dt_3) self.dt_1.restricts.add(self.dt_4) self.dt_1.save() self.dt_3.restricts.add(self.dt_5) self.dt_3.save() self.assertEqual(self.dt_1.clean(), None) def test_datatype_circular_recursive_clean_good3(self): """ dt1 restricts dt2, dt3, dt4 dt4 restricts dt5 """ self.dt_1.restricts.add(self.dt_2) self.dt_1.restricts.add(self.dt_3) self.dt_1.restricts.add(self.dt_4) self.dt_1.save() self.dt_4.restricts.add(self.dt_5) self.dt_4.save() self.assertEqual(self.dt_1.clean(), None) def 
test_datatype_circular_recursive_clean_good4(self): """ dt1 restricts dt2, dt3, dt4 dt2 restricts dt4 """ self.dt_1.restricts.add(self.dt_2) self.dt_1.restricts.add(self.dt_3) self.dt_1.restricts.add(self.dt_4) self.dt_1.save() self.dt_2.restricts.add(self.dt_4) self.dt_2.save() self.assertEqual(self.dt_1.clean(), None) def test_datatype_circular_recursive_clean_good5(self): """ dt1 restricts dt2, dt3, dt4 dt3 restricts dt4 """ self.dt_1.restricts.add(self.dt_2) self.dt_1.restricts.add(self.dt_3) self.dt_1.restricts.add(self.dt_4) self.dt_1.save() self.dt_3.restricts.add(self.dt_4) self.dt_3.save() self.assertEqual(self.dt_1.clean(), None) def test_datatype_circular_recursive_clean_good6(self): """ dt1 restricts dt2, dt3, dt4 dt4 restricts dt2 """ self.dt_1.restricts.add(self.dt_2) self.dt_1.restricts.add(self.dt_3) self.dt_1.restricts.add(self.dt_4) self.dt_1.save() self.dt_4.restricts.add(self.dt_2) self.dt_4.save() self.assertEqual(self.dt_1.clean(), None) def test_datatype_direct_is_restricted_by_1(self): """ dt1 restricts dt2 dt1.is_restricted_by(dt2) - FALSE dt2.is_restricted_by(dt1) - TRUE """ self.dt_1.restricts.add(self.dt_2) self.dt_1.save() self.assertEqual(self.dt_1.is_restricted_by(self.dt_2), False) self.assertEqual(self.dt_2.is_restricted_by(self.dt_1), True) def test_datatype_direct_is_restricted_by_2(self): """ dt1 and dt2 exist but do not restrict each other dt1.is_restricted_by(dt2) - FALSE dt2.is_restricted_by(dt1) - FALSE """ self.assertEqual(self.dt_1.is_restricted_by(self.dt_2), False) self.assertEqual(self.dt_2.is_restricted_by(self.dt_1), False) def test_datatype_recursive_is_restricted_by_1(self): """ dt1 restricts dt2, dt2 restricts dt3 dt1.is_restricted_by(dt3) - FALSE dt3.is_restricted_by(dt1) - TRUE dt1.is_restricted_by(dt2) - FALSE dt2.is_restricted_by(dt1) - TRUE """ self.dt_1.restricts.add(self.dt_2) self.dt_1.save() self.dt_2.restricts.add(self.dt_3) self.dt_2.save() self.assertEqual(self.dt_1.is_restricted_by(self.dt_3), False) 
self.assertEqual(self.dt_3.is_restricted_by(self.dt_1), True) self.assertEqual(self.dt_1.is_restricted_by(self.dt_2), False) self.assertEqual(self.dt_2.is_restricted_by(self.dt_1), True) def test_datatype_recursive_is_restricted_by_2(self): """ dt1 restricts dt[2,3,4] dt2 restricts dt5 """ self.dt_1.restricts.add(self.dt_2) self.dt_1.restricts.add(self.dt_3) self.dt_1.restricts.add(self.dt_4) self.dt_1.save() self.dt_2.restricts.add(self.dt_5) self.dt_2.save() self.assertEqual(self.dt_1.is_restricted_by(self.dt_2), False) self.assertEqual(self.dt_1.is_restricted_by(self.dt_3), False) self.assertEqual(self.dt_1.is_restricted_by(self.dt_4), False) self.assertEqual(self.dt_1.is_restricted_by(self.dt_5), False) self.assertEqual(self.dt_5.is_restricted_by(self.dt_1), True) def test_datatype_recursive_is_restricted_by_3(self): """ dt1 restricts dt[2,3,4] dt3 restricts dt5 """ self.dt_1.restricts.add(self.dt_2) self.dt_1.restricts.add(self.dt_3) self.dt_1.restricts.add(self.dt_4) self.dt_1.save() self.dt_3.restricts.add(self.dt_5) self.dt_3.save() self.assertEqual(self.dt_1.is_restricted_by(self.dt_2), False) self.assertEqual(self.dt_1.is_restricted_by(self.dt_3), False) self.assertEqual(self.dt_1.is_restricted_by(self.dt_4), False) self.assertEqual(self.dt_1.is_restricted_by(self.dt_5), False) self.assertEqual(self.dt_5.is_restricted_by(self.dt_1), True) def test_datatype_recursive_is_restricted_by_4(self): """ dt1 restricts dt[2,3,4] dt4 restricts dt5 """ self.dt_1.restricts.add(self.dt_2) self.dt_1.restricts.add(self.dt_3) self.dt_1.restricts.add(self.dt_4) self.dt_1.save() self.dt_4.restricts.add(self.dt_5) self.dt_4.save() self.assertEqual(self.dt_1.is_restricted_by(self.dt_2), False) self.assertEqual(self.dt_1.is_restricted_by(self.dt_3), False) self.assertEqual(self.dt_1.is_restricted_by(self.dt_4), False) self.assertEqual(self.dt_1.is_restricted_by(self.dt_5), False) self.assertEqual(self.dt_5.is_restricted_by(self.dt_1), True) def 
test_datatype_no_restriction_clean_good(self): """ Datatype without any restrictions. """ self.assertEqual(self.dt_1.clean(), None) def test_datatype_nested_valid_restrictions_clean_good(self): """ Datatypes such that A restricts B, and B restricts C """ self.dt_1.restricts.add(self.dt_2) self.dt_1.save() self.dt_2.restricts.add(self.dt_3) self.dt_2.save() self.assertEqual(self.dt_1.clean(), None) self.assertEqual(self.dt_2.clean(), None) self.assertEqual(self.dt_3.clean(), None) def test_datatype_nested_invalid_restrictions_scrambled_clean_bad(self): """ Datatypes are restricted to constrain execution order such that: A restricts C A restricts B B restricts C C restricts A """ self.dt_1.restricts.add(self.dt_3) self.dt_1.save() self.dt_1.restricts.add(self.dt_2) self.dt_1.save() self.dt_2.restricts.add(self.dt_3) self.dt_2.save() self.dt_3.restricts.add(self.dt_1) self.dt_3.save() self.assertRaisesRegexp(ValidationError, re.escape('Datatype "{}" has a circular restriction'.format(self.dt_1)), self.dt_1.clean) def test_datatype_direct_circular_restriction_clean_bad(self): """ Datatype directly restricts itself: A restricts A """ self.dt_1.restricts.add(self.dt_1) self.dt_1.save() self.assertRaisesRegexp(ValidationError, re.escape('Datatype "{}" has a circular restriction'.format(self.dt_1)), self.dt_1.clean) def test_datatype_circular_restriction_indirect_clean(self): """ Datatype restricts itself through intermediary: A restricts B B restricts A """ self.dt_1.restricts.add(self.dt_2) self.dt_1.save() self.dt_2.restricts.add(self.dt_1) self.dt_2.save() self.assertRaisesRegexp(ValidationError, re.escape('Datatype "{}" has a circular restriction'.format(self.dt_1)), self.dt_1.clean) def test_datatype_clean_no_restricts(self): """ Clean on a Datatype with no restrictions should pass. """ datatype = Datatype( name="squeaky", description="a clean, new datatype", user=self.myUser) # Note that this passes if the next line is uncommented. 
# datatype.save() self.assertEqual(datatype.clean(), None) ######## # New tests to check the new functionality in Datatype.clean() # that checks BasicConstraints, the prototype Dataset, etc. def __test_clean_restrict_same_builtin_multiply_good_h(self, builtin_type): """ Helper for testing clean() on cases where a Datatype restricts several supertypes with the same builtin type. """ super_DT = Datatype(name="SuperDT", description="Supertype 1", user=self.myUser) super_DT.full_clean() super_DT.save() super_DT.restricts.add(builtin_type) super2_DT = Datatype(name="SuperDT2", description="Supertype 2", user=self.myUser) super2_DT.full_clean() super2_DT.save() super2_DT.restricts.add(builtin_type) my_DT = Datatype(name="MyDT", description="Datatype with two built-in supertypes", user=self.myUser) my_DT.full_clean() my_DT.save() my_DT.restricts.add(builtin_type, builtin_type) your_DT = Datatype(name="YourDT", description="Datatype with two supertypes", user=self.myUser) your_DT.full_clean() your_DT.save() your_DT.restricts.add(super_DT, super2_DT) self.assertEquals(my_DT.clean(), None) self.assertEquals(your_DT.clean(), None) def test_clean_restrict_several_str_good(self): """ Testing clean() on the case where a Datatype restricts several string supertypes. """ self.__test_clean_restrict_same_builtin_multiply_good_h(self.STR) def test_clean_restrict_several_int_good(self): """ Testing clean() on the case where a Datatype restricts several integer supertypes. """ self.__test_clean_restrict_same_builtin_multiply_good_h(self.INT) def test_clean_restrict_several_float_good(self): """ Testing clean() on the case where a Datatype restricts several float supertypes. """ self.__test_clean_restrict_same_builtin_multiply_good_h(self.FLOAT) def test_clean_restrict_several_bool_good(self): """ Testing clean() on the case where a Datatype restricts several Boolean supertypes. 
""" self.__test_clean_restrict_same_builtin_multiply_good_h(self.BOOL) def test_clean_restrict_int_float_good(self): """ Testing clean() on the case where a Datatype restricts both integer and float supertypes. """ super_DT = Datatype(name="SuperDT", description="Supertype 1", user=self.myUser) super_DT.full_clean() super_DT.save() super_DT.restricts.add(self.INT) super2_DT = Datatype(name="SuperDT2", description="Supertype 2", user=self.myUser) super2_DT.full_clean() super2_DT.save() super2_DT.restricts.add(self.FLOAT) my_DT = Datatype(name="MyDT", description="Datatype with two built-in supertypes", user=self.myUser) my_DT.full_clean() my_DT.save() my_DT.restricts.add(self.INT, self.FLOAT) your_DT = Datatype(name="YourDT", description="Datatype with two supertypes", user=self.myUser) your_DT.full_clean() your_DT.save() your_DT.restricts.add(super_DT, super2_DT) self.assertEquals(my_DT.clean(), None) self.assertEquals(your_DT.clean(), None) #### def __test_clean_restrict_several_builtins_bad_h(self, builtin_type_1, builtin_type_2): """ Helper for testing clean() on cases where a Datatype restricts supertypes with non-compatible builtin types. 
""" super_DT = Datatype(name="SuperDT", description="Supertype 1", user=self.myUser) super_DT.full_clean() super_DT.save() super_DT.restricts.add(builtin_type_1) super2_DT = Datatype(name="SuperDT2", description="Supertype 2", user=self.myUser) super2_DT.full_clean() super2_DT.save() super2_DT.restricts.add(builtin_type_2) my_DT = Datatype(name="MyDT", description="Datatype with two built-in supertypes", user=self.myUser) my_DT.full_clean() my_DT.save() my_DT.restricts.add(builtin_type_1, builtin_type_2) your_DT = Datatype(name="YourDT", description="Datatype with two supertypes", user=self.myUser) your_DT.full_clean() your_DT.save() your_DT.restricts.add(super_DT, super2_DT) self.assertRaisesRegexp(ValidationError, re.escape(('Datatype "{}" restricts multiple built-in, non-numeric types' .format(my_DT))), my_DT.clean) self.assertRaisesRegexp(ValidationError, re.escape(('Datatype "{}" restricts multiple built-in, non-numeric types' .format(your_DT))), your_DT.clean) def test_clean_restricts_str_int_bad(self): """ Tests clean() on the case of a Datatype restricting both STR and INT. """ self.__test_clean_restrict_several_builtins_bad_h(self.STR, self.INT) def test_clean_restricts_str_float_bad(self): """ Tests clean() on the case of a Datatype restricting both STR and FLOAT. """ self.__test_clean_restrict_several_builtins_bad_h(self.STR, self.FLOAT) def test_clean_restricts_str_bool_bad(self): """ Tests clean() on the case of a Datatype restricting both STR and BOOL. """ self.__test_clean_restrict_several_builtins_bad_h(self.STR, self.BOOL) def test_clean_restricts_float_bool_bad(self): """ Tests clean() on the case of a Datatype restricting both FLOAT and BOOL. """ self.__test_clean_restrict_several_builtins_bad_h(self.FLOAT, self.BOOL) def test_clean_restricts_int_bool_bad(self): """ Tests clean() on the case of a Datatype restricting both INT and BOOL. 
""" self.__test_clean_restrict_several_builtins_bad_h(self.BOOL, self.INT) #### def test_clean_prototype_good(self): """ Testing clean() on a Datatype whose prototype is well-defined. """ # Make a Dataset for the prototype CSV file. PROTOTYPE_CDT = CompoundDatatype.objects.get(pk=CDTs.PROTOTYPE_PK) DNA_prototype = Dataset.create_dataset(os.path.join(samplecode_path, "DNAprototype.csv"), user=self.myUser, cdt=PROTOTYPE_CDT, name="DNAPrototype", description="Prototype for the DNA Datatype") self.DNA_dt.prototype = DNA_prototype self.assertEquals(self.DNA_dt.clean(), None) def test_clean_raw_prototype_bad(self): """ Testing clean() on a Datatype whose prototype is raw. """ DNA_raw_prototype = Dataset.create_dataset(os.path.join(samplecode_path, "DNAprototype.csv"), user=self.myUser, cdt=None, name="RawPrototype", description="Prototype that is raw") self.DNA_dt.prototype = DNA_raw_prototype PROTOTYPE_CDT = CompoundDatatype.objects.get(pk=CDTs.PROTOTYPE_PK) self.assertRaisesRegexp(ValidationError, re.escape('Prototype Dataset for Datatype "{}" should have CompoundDatatype "{}", ' 'but it is raw'.format(self.DNA_dt, PROTOTYPE_CDT)), self.DNA_dt.clean) def test_clean_prototype_wrong_CDT_bad(self): """ Testing clean() on a Datatype whose prototype has the incorrect CDT. 
""" wrong_CDT = CompoundDatatype(user=self.myUser) wrong_CDT.save() wrong_CDT.members.create(datatype=self.STR, column_name="example", column_idx=1, blankable=True) wrong_CDT.members.create(datatype=self.BOOL, column_name="thisshouldbesomethingelse", column_idx=2) wrong_CDT.clean() DNA_prototype_bad_CDT = Dataset.create_dataset(os.path.join(samplecode_path, "DNAprototype_bad_CDT.csv"), user=self.myUser, cdt=wrong_CDT, name="BadCDTPrototype", description="Prototype with a bad CDT") self.DNA_dt.prototype = DNA_prototype_bad_CDT PROTOTYPE_CDT = CompoundDatatype.objects.get(pk=CDTs.PROTOTYPE_PK) self.assertRaisesRegexp(ValidationError, re.escape(('Prototype Dataset for Datatype "{}" should have CompoundDatatype "{}", ' 'but it has "{}"'.format(self.DNA_dt, PROTOTYPE_CDT, wrong_CDT))), self.DNA_dt.clean) # Propagation of BasicConstraint errors is checked thoroughly in the BasicConstraint # tests. Let's just quickly check two cases. def test_clean_BC_clean_propagation_good(self): """ Testing to confirm that BasicConstraint.clean() is called from Datatype.clean(): good case. """ constr_DT = Datatype(name="ConstrainedDatatype", description="Datatype with good BasicConstraint", user=self.myUser) constr_DT.full_clean() constr_DT.save() constr_DT.restricts.add(self.FLOAT) constr_DT.basic_constraints.create(ruletype=BasicConstraint.MIN_VAL, rule="7") self.assertEquals(constr_DT.clean(), None) def test_clean_BC_clean_propagation_bad(self): """ Testing to confirm that BasicConstraint.clean() is called from Datatype.clean(): bad case. 
""" constr_DT = Datatype(name="BadlyConstrainedDatatype", description="Datatype with bad BasicConstraint", user=self.myUser) constr_DT.full_clean() constr_DT.save() constr_DT.restricts.add(self.FLOAT) constr = constr_DT.basic_constraints.create(ruletype=BasicConstraint.DATETIMEFORMAT, rule="%Y %b %d") self.assertRaisesRegexp(ValidationError, re.escape((('BasicConstraint "{}" specifies a date/time format, but its parent ' 'Datatype "{}" has builtin type "{}"').format( constr, constr_DT, self.FLOAT))), constr_DT.clean) # Cases where a Datatype has a good BasicConstraint associated to it are well-tested in the # BasicConstraint tests. Again we quickly check a couple of cases. def test_clean_has_good_regexp_good(self): """ Testing clean() on a Datatype with a good REGEXP attached. """ constr_DT = Datatype(name="ConstrainedDatatype", description="Datatype with good REGEXP", user=self.myUser) constr_DT.full_clean() constr_DT.save() constr_DT.restricts.add(self.FLOAT) constr_DT.basic_constraints.create(ruletype=BasicConstraint.REGEXP, rule=".*") self.assertEquals(constr_DT.clean(), None) def test_clean_has_good_min_val_good(self): """ Testing clean() on a Datatype with a good MIN_VAL attached. """ constr_DT = Datatype(name="ConstrainedDatatype", description="Datatype with good MIN_VAL", user=self.myUser) constr_DT.full_clean() constr_DT.save() constr_DT.restricts.add(self.INT) constr_DT.basic_constraints.create(ruletype=BasicConstraint.MIN_VAL, rule="-22.3") self.assertEquals(constr_DT.clean(), None) # Cases where a Datatype has several good BCs attached. def test_clean_float_has_several_good_BCs_good(self): """ Testing clean() on a Datatype with several good BCs attached. 
""" constr_DT = Datatype(name="ConstrainedDatatype", description="FLOAT with good BCs", user=self.myUser) constr_DT.full_clean() constr_DT.save() constr_DT.restricts.add(self.FLOAT) constr_DT.basic_constraints.create(ruletype=BasicConstraint.MAX_VAL, rule="1000") constr_DT.basic_constraints.create(ruletype=BasicConstraint.MIN_VAL, rule="1.7") constr_DT.basic_constraints.create(ruletype=BasicConstraint.REGEXP, rule="[1-9]*") self.assertEquals(constr_DT.clean(), None) def test_clean_string_has_several_good_BCs_good(self): """ Testing clean() on a string Datatype with several good BCs attached. """ constr_DT = Datatype(name="ConstrainedDatatype", description="STR with good BCs", user=self.myUser) constr_DT.full_clean() constr_DT.save() constr_DT.restricts.add(self.STR) constr_DT.basic_constraints.create(ruletype=BasicConstraint.MIN_LENGTH, rule="6") constr_DT.basic_constraints.create(ruletype=BasicConstraint.DATETIMEFORMAT, rule="%Y %b %d") self.assertEquals(constr_DT.clean(), None) #### # Cases where a Datatype has multiple BasicConstraints of the same type. def __test_clean_multiple_same_BCs_bad_h(self, builtin_type, rules, multiple_BC_type): """ Helper for the case where a Datatype has multiple BasicConstraints of the same type. rules is a list of tuples of the form (ruletype, rule). multiple_BC_type is one of BasicConstraint.(MIN|MAX)_(LENGTH|VAL) or BasicConstraint.DATETIMEFORMAT. """ constr_DT = Datatype(name="MultiplyConstrainedDatatype", description="Datatype with several BCs of the same type", user=self.myUser) constr_DT.full_clean() constr_DT.save() constr_DT.restricts.add(builtin_type) counts = {} bad_ruletypes = set() for curr_ruletype, curr_rule in rules: try: counts[curr_ruletype] += 1 bad_ruletypes.add(curr_ruletype) except KeyError: counts[curr_ruletype] = 1 constr_DT.basic_constraints.create(ruletype=curr_ruletype, rule="{}".format(curr_rule)) possible_matches = [re.escape('Datatype "{}" has {} constraints of type {}, but should have at most one'. 
format(constr_DT, counts[x], x)) for x in bad_ruletypes] match_pattern = "|".join(possible_matches) self.assertRaisesRegexp(ValidationError, match_pattern, constr_DT.clean) def test_clean_int_multiple_min_val_bad(self): """ Testing clean() on an integer Datatype with multiple MIN_VAL restrictions. """ self.__test_clean_multiple_same_BCs_bad_h( self.INT, [(BasicConstraint.MIN_VAL, 6), (BasicConstraint.MIN_VAL, 8)], BasicConstraint.MIN_VAL ) def test_clean_float_multiple_max_val_bad(self): """ Testing clean() on a float Datatype with multiple MAX_VAL restrictions. """ self.__test_clean_multiple_same_BCs_bad_h( self.FLOAT, [(BasicConstraint.MAX_VAL, 1220), (BasicConstraint.MAX_VAL, 6)], BasicConstraint.MAX_VAL ) def test_clean_str_multiple_min_length_bad(self): """ Testing clean() on a string Datatype with multiple MIN_LENGTH restrictions. """ self.__test_clean_multiple_same_BCs_bad_h( self.STR, [(BasicConstraint.MIN_LENGTH, 1), (BasicConstraint.MIN_LENGTH, 2)], BasicConstraint.MIN_LENGTH ) def test_clean_str_multiple_max_length_bad(self): """ Testing clean() on a string Datatype with multiple MAX_LENGTH restrictions. """ self.__test_clean_multiple_same_BCs_bad_h( self.STR, [(BasicConstraint.MAX_LENGTH, 7), (BasicConstraint.MAX_LENGTH, 4), (BasicConstraint.MAX_LENGTH, 7)], BasicConstraint.MAX_LENGTH ) def test_clean_str_multiple_dtf_bad(self): """ Testing clean() on a string Datatype with multiple DATETIMEFORMAT restrictions. """ self.__test_clean_multiple_same_BCs_bad_h( self.STR, [(BasicConstraint.DATETIMEFORMAT, "%Y"), (BasicConstraint.DATETIMEFORMAT, "foo"), (BasicConstraint.MAX_LENGTH, 7)], BasicConstraint.DATETIMEFORMAT ) def test_clean_float_some_good_some_multiple_BCs_bad(self): """ Testing clean() on a float Datatype with several BCs but with at least one multiply-defined. """ # Note that here, both MIN_VAL and MAX_VAL are multiply-defined, # so either one could fail. That's why we pass a regexp for # multiple_BC_type. 
self.__test_clean_multiple_same_BCs_bad_h( self.FLOAT, [(BasicConstraint.MIN_VAL, "7"), (BasicConstraint.MAX_VAL, "15"), (BasicConstraint.MIN_VAL, "13"), (BasicConstraint.REGEXP, "[1-9]+"), (BasicConstraint.MAX_VAL, "19")], "(?:{}|{})".format(BasicConstraint.MIN_VAL, BasicConstraint.MAX_VAL) ) def _setup_datatype(self, name, desc, rules, restricts): """ Helper function to create a Datatype. Rules is a list of tuples (ruletype, rule), and restricts is a list of supertypes. """ dt = Datatype(name=name, description=desc, user=self.myUser) dt.full_clean() dt.save() for supertype in restricts: dt.restricts.add(supertype) for ruletype, rule in rules: if ruletype: dt.basic_constraints.create(ruletype=ruletype, rule=rule) return dt def _setup_inheriting_datatype(self, super_name, super_desc, super_ruletype, super_rule, super_builtin, cnstr_name, cnstr_desc, cnstr_ruletype, cnstr_rule): """ Helper function to create a pair of Datatypes, one inheriting from the other. """ super_DT = self._setup_datatype(super_name, super_desc, [(super_ruletype, super_rule)], [super_builtin]) constr_DT = self._setup_datatype(cnstr_name, cnstr_desc, [(cnstr_ruletype, cnstr_rule)], [super_DT]) return (super_DT, constr_DT) def _setup_inheriting_datatype2(self, super1_name, super1_desc, super1_ruletype, super1_rule, super1_builtin, super2_name, super2_desc, super2_ruletype, super2_rule, super2_builtin, constr_name, constr_desc, constr_ruletype, constr_rule): """ Helper function to create three Datatypes, the first two being supertypes of the third. 
""" super1_DT, constr_DT = self._setup_inheriting_datatype( super1_name, super1_desc, super1_ruletype, super1_rule, super1_builtin, constr_name, constr_desc, constr_ruletype, constr_rule) super2_DT = self._setup_datatype(super2_name, super2_desc, [(super2_ruletype, super2_rule)], [super2_builtin]) constr_DT.restricts.add(super2_DT) return (super1_DT, super2_DT, constr_DT) #### def __test_clean_num_constraint_conflicts_with_supertypes_h(self, builtin_type, BC_type, constr_val, supertype_constr_val): """ Helper to test cases where numerical constraints conflict with those of the supertypes. """ super_DT, constr_DT = self._setup_inheriting_datatype( "ParentDT", "Parent with constraint", BC_type, supertype_constr_val, builtin_type, "ConstrDT", "Datatype whose constraint conflicts with parent", BC_type, constr_val) if BC_type == BasicConstraint.MIN_LENGTH: error_msg = 'Datatype "{}" has MIN_LENGTH {}, but its supertype "{}" has a longer or equal MIN_LENGTH of {}' elif BC_type == BasicConstraint.MAX_LENGTH: error_msg = ('Datatype "{}" has MAX_LENGTH {}, but its supertype ' '"{}" has a shorter or equal MAX_LENGTH of {}') elif BC_type == BasicConstraint.MIN_VAL: error_msg = 'Datatype "{}" has MIN_VAL {}, but its supertype "{}" has a larger or equal MIN_VAL of {}' elif BC_type == BasicConstraint.MAX_VAL: error_msg = 'Datatype "{}" has MAX_VAL {}, but its supertype "{}" has a smaller or equal MAX_VAL of {}' self.assertRaisesRegexp(ValidationError, re.escape(error_msg.format(constr_DT, constr_val, super_DT, supertype_constr_val)), constr_DT.clean) def test_clean_int_min_val_supertype_conflict_bad(self): """ Testing clean() on an integer whose MIN_VAL conflicts with its supertypes'. """ self.__test_clean_num_constraint_conflicts_with_supertypes_h( self.INT, BasicConstraint.MIN_VAL, 7, 9 ) def test_clean_float_max_val_supertype_conflict_bad(self): """ Testing clean() on an integer whose MIN_VAL conflicts with its supertypes'. 
""" self.__test_clean_num_constraint_conflicts_with_supertypes_h( self.FLOAT, BasicConstraint.MAX_VAL, 11, 10.7 ) def test_clean_str_min_length_supertype_conflict_bad(self): """ Testing clean() on an integer whose MIN_VAL conflicts with its supertypes'. """ self.__test_clean_num_constraint_conflicts_with_supertypes_h( self.STR, BasicConstraint.MIN_LENGTH, 9, 10 ) def test_clean_str_max_length_supertype_conflict_bad(self): """ Testing clean() on an integer whose MAX_VAL conflicts with its supertypes'. """ self.__test_clean_num_constraint_conflicts_with_supertypes_h( self.STR, BasicConstraint.MAX_LENGTH, 223, 20 ) def test_clean_dtf_conflict_with_supertype_bad(self): """ Testing clean() on the case where a Datatype has a DATETIMEFORMAT but so does its supertype. """ _super_DT, constr_DT = self._setup_inheriting_datatype( "DateTimeDT", "String with a DATETIMEFORMAT", BasicConstraint.DATETIMEFORMAT, "%Y %b %d", self.STR, "OverwritingDateTimeDT", "String with a DATETIMEFORMAT whose parent also has one", BasicConstraint.DATETIMEFORMAT, "%Y-%b-%d") self.assertRaisesRegexp(ValidationError, re.escape(('Datatype "{}" should have only one DATETIMEFORMAT restriction acting on ' 'it, but it has {}'.format(constr_DT, 2))), constr_DT.clean) def test_clean_several_supertypes_have_dtfs_bad(self): """ Testing clean() on the case where a Datatype has several supertypes with DATETIMEFORMATs. 
""" dtf = BasicConstraint.DATETIMEFORMAT _super_DT, _second_DT, constr_DT = self._setup_inheriting_datatype2( "DateTimeDT", "String with a DATETIMEFORMAT", dtf, "%Y %b %d", self.STR, "OverwritingDateTimeDT", "Second string with a DATETIMEFORMAT", dtf, "%Y %b %d", self.STR, "OverwritingDateTimeChildDT", "String with a DATETIMEFORMAT whose parent also has one", dtf, "%Y %b %d") self.assertRaisesRegexp(ValidationError, re.escape(('Datatype "{}" should have only one DATETIMEFORMAT restriction acting on ' 'it, but it has {}'.format(constr_DT, 3))), constr_DT.clean) def test_clean_dtf_several_supertypes_one_has_dtf_bad(self): """ Testing clean() on the case where a Datatype has a DATETIMEFORMAT and several supertypes, one which has one. """ dtf = BasicConstraint.DATETIMEFORMAT _super_DT, _second_DT, constr_DT = self._setup_inheriting_datatype2( "DateTimeDT", "String with a DATETIMEFORMAT", dtf, "%Y %b %d", self.STR, "OtherDT", "String by a different name", None, None, self.STR, "OverwritingDateTimeDT", "String with a DATETIMEFORMAT whose parent also has one", dtf, "%Y %d") self.assertRaisesRegexp(ValidationError, re.escape(('Datatype "{}" should have only one DATETIMEFORMAT restriction acting on ' 'it, but it has {}'.format(constr_DT, 2))), constr_DT.clean) def test_clean_dtf_several_supertypes_one_has_dtf_other_is_builtin_bad(self): """ Testing clean() on a DATETIMEFORMATted Datatype with two supertypes: STR and another DTFd Datatype. 
""" dtf = BasicConstraint.DATETIMEFORMAT _super_DT, constr_DT = self._setup_inheriting_datatype( "DateTimeDT", "String with a DATETIMEFORMAT", dtf, "%Y %b %d", self.STR, "OverwritingDateTimeDT", "String with a DATETIMEFORMAT whose parent also has one", dtf, "%Y %d") constr_DT.restricts.add(self.STR) self.assertRaisesRegexp(ValidationError, re.escape(('Datatype "{}" should have only one DATETIMEFORMAT restriction acting on ' 'it, but it has {}'.format(constr_DT, 2))), constr_DT.clean) def test_clean_float_conflicting_min_max_val_bad(self): """ Testing clean() on a float Datatype with conflicting MIN|MAX_VAL defined directly. """ constr_DT = self._setup_datatype( "ConflictingBoundsDT", "Float with conflicting MIN|MAX_VAL", [(BasicConstraint.MIN_VAL, "15"), (BasicConstraint.MAX_VAL, "5")], [self.FLOAT]) self.assertRaisesRegexp(ValidationError, re.escape(('Datatype "{}" has effective MIN_VAL {} exceeding its effective MAX_VAL {}' .format(constr_DT, 15, 5))), constr_DT.clean) def test_clean_int_conflicting_inherited_min_max_val_bad(self): """ Testing clean() on an int Datatype with conflicting MIN|MAX_VAL defined on its supertypes. """ _, _, constr_DT = self._setup_inheriting_datatype2( "BoundedFloatDT", "Float with a MIN_VAL", BasicConstraint.MIN_VAL, "20", self.FLOAT, "BoundedIntDT", "Int with a MAX_VAL", BasicConstraint.MAX_VAL, "18.2", self.INT, "InheritingBadBoundsDT", "Datatype inheriting conflicting MIN|MAX_VAL", None, None) self.assertRaisesRegexp(ValidationError, re.escape(('Datatype "{}" has effective MIN_VAL {} exceeding its effective MAX_VAL {}' .format(constr_DT, 20, 18.2))), constr_DT.clean) def test_clean_float_conflicting_half_inherited_min_max_val_bad(self): """ Testing clean() on a float Datatype with conflicting MIN|MAX_VAL, one inherited and one directly. 
""" super_DT = Datatype(name="BoundedDT", description="Float with a MIN_VAL", user=self.myUser) super_DT.full_clean() super_DT.save() super_DT.restricts.add(self.FLOAT) super_DT.basic_constraints.create(ruletype=BasicConstraint.MIN_VAL, rule="17.7") constr_DT = Datatype(name="ConflictingBoundsDT", description="Float with half-inherited conflicting MIN|MAX_VAL", user=self.myUser) constr_DT.full_clean() constr_DT.save() constr_DT.restricts.add(super_DT) constr_DT.basic_constraints.create(ruletype=BasicConstraint.MAX_VAL, rule="6") self.assertRaisesRegexp(ValidationError, re.escape(('Datatype "{}" has effective MIN_VAL {} exceeding its effective MAX_VAL {}' .format(constr_DT, 17.7, 6))), constr_DT.clean) def test_clean_int_min_max_val_too_narrow_bad(self): """ Testing clean() on an integer Datatype whose MIN|MAX_VAL do not admit any integers. """ constr_DT = Datatype(name="ConflictingBoundsDT", description="INT with MIN|MAX_VAL too narrow", user=self.myUser) constr_DT.full_clean() constr_DT.save() constr_DT.restricts.add(self.INT) constr_DT.basic_constraints.create(ruletype=BasicConstraint.MIN_VAL, rule="15.7") constr_DT.basic_constraints.create(ruletype=BasicConstraint.MAX_VAL, rule="15.9") self.assertRaisesRegexp( ValidationError, re.escape((('Datatype "{}" has built-in type INT, but there are no integers between its ' 'effective MIN_VAL {} and its effective MAX_VAL {}').format( constr_DT, 15.7, 15.9))), constr_DT.clean) def test_clean_int_inherited_min_max_val_too_narrow_bad(self): """ Testing clean() on an integer Datatype whose inherited MIN|MAX_VAL do not admit any integers. 
""" super_DT = Datatype(name="BoundedFloatDT", description="Float with a MIN_VAL", user=self.myUser) super_DT.full_clean() super_DT.save() super_DT.restricts.add(self.FLOAT) super_DT.basic_constraints.create(ruletype=BasicConstraint.MIN_VAL, rule="20.2") second_DT = Datatype(name="BoundedIntDT", description="Int with a MAX_VAL", user=self.myUser) second_DT.full_clean() second_DT.save() second_DT.restricts.add(self.INT) second_DT.basic_constraints.create(ruletype=BasicConstraint.MAX_VAL, rule="20.55") constr_DT = Datatype(name="InheritingBadBoundsDT", description="Datatype inheriting too-narrow MIN|MAX_VAL", user=self.myUser) constr_DT.full_clean() constr_DT.save() constr_DT.restricts.add(super_DT) constr_DT.restricts.add(second_DT) self.assertRaisesRegexp( ValidationError, re.escape((('Datatype "{}" has built-in type INT, but there ' 'are no integers between its effective MIN_VAL {} ' 'and its effective MAX_VAL {}').format( constr_DT, 20.2, 20.55))), constr_DT.clean) def test_clean_int_half_inherited_min_max_val_too_narrow_bad(self): """ Testing clean() on a float Datatype with half-inherited MIN|MAX_VAL that are too narrow. 
""" super_DT = Datatype(name="BoundedDT", description="Float with a MIN_VAL", user=self.myUser) super_DT.full_clean() super_DT.save() super_DT.restricts.add(self.FLOAT) super_DT.basic_constraints.create(ruletype=BasicConstraint.MIN_VAL, rule="17.1") constr_DT = Datatype(name="NarrowBoundsDT", description="INT with half-inherited too-narrow MIN|MAX_VAL", user=self.myUser) constr_DT.full_clean() constr_DT.save() constr_DT.restricts.add(super_DT) constr_DT.restricts.add(self.INT) constr_DT.basic_constraints.create(ruletype=BasicConstraint.MAX_VAL, rule="17.8") self.assertRaisesRegexp( ValidationError, re.escape((('Datatype "{}" has built-in type INT, but there are no ' 'integers between its effective MIN_VAL {} and its ' 'effective MAX_VAL {}').format(constr_DT, 17.1, 17.8))), constr_DT.clean) def test_clean_str_conflicting_min_max_length_bad(self): """ Testing clean() on a string Datatype with conflicting MIN|MAX_LENGTH defined directly. """ constr_DT = Datatype(name="ConflictingBoundsDT", description="String with conflicting MIN|MAX_LENGTH", user=self.myUser) constr_DT.full_clean() constr_DT.save() constr_DT.restricts.add(self.STR) constr_DT.basic_constraints.create(ruletype=BasicConstraint.MIN_LENGTH, rule="2234") constr_DT.basic_constraints.create(ruletype=BasicConstraint.MAX_LENGTH, rule="6") self.assertRaisesRegexp( ValidationError, re.escape(('Datatype "{}" has effective MIN_LENGTH {} exceeding ' 'its effective MAX_LENGTH {}'.format(constr_DT, 2234, 6))), constr_DT.clean) def test_clean_str_conflicting_inherited_min_max_length_bad(self): """ Testing clean() on a string Datatype with conflicting MIN|MAX_LENGTH defined on its supertypes. 
""" super_DT = Datatype(name="BoundedMinDT", description="String with a MIN_LENGTH", user=self.myUser) super_DT.full_clean() super_DT.save() super_DT.restricts.add(self.STR) super_DT.basic_constraints.create(ruletype=BasicConstraint.MIN_LENGTH, rule="44") second_DT = Datatype(name="BoundedMaxDT", description="String with a MAX_LENGTH", user=self.myUser) second_DT.full_clean() second_DT.save() second_DT.restricts.add(self.STR) second_DT.basic_constraints.create(ruletype=BasicConstraint.MAX_LENGTH, rule="22") constr_DT = Datatype(name="InheritingBadBoundsDT", description="Datatype inheriting conflicting MIN|MAX_LENGTH", user=self.myUser) constr_DT.full_clean() constr_DT.save() constr_DT.restricts.add(super_DT) constr_DT.restricts.add(second_DT) self.assertRaisesRegexp( ValidationError, re.escape(('Datatype "{}" has effective MIN_LENGTH {} exceeding ' 'its effective MAX_LENGTH {}'.format(constr_DT, 44, 22))), constr_DT.clean) def test_clean_str_conflicting_half_inherited_min_max_length_bad(self): """ Testing clean() on a string Datatype with conflicting MIN|MAX_LENGTH, one inherited and one direct. """ super_DT = Datatype(name="BoundedDT", description="String with a MIN_LENGTH", user=self.myUser) super_DT.full_clean() super_DT.save() super_DT.restricts.add(self.STR) super_DT.basic_constraints.create(ruletype=BasicConstraint.MAX_LENGTH, rule="20") constr_DT = Datatype(name="HalfInheritingBadBoundsDT", description="Datatype inheriting conflicting MIN|MAX_LENGTH", user=self.myUser) constr_DT.full_clean() constr_DT.save() constr_DT.restricts.add(super_DT) constr_DT.basic_constraints.create(ruletype=BasicConstraint.MIN_LENGTH, rule="30") self.assertRaisesRegexp( ValidationError, re.escape(('Datatype "{}" has effective MIN_LENGTH {} exceeding ' 'its effective MAX_LENGTH {}'.format(constr_DT, 30, 20))), constr_DT.clean) # FIXME: add some tests here when CustomConstraints are fully-coded. #### # Tests of is_complete() and complete_clean(). 
def test_is_complete_unsaved(self): """ Tests is_complete() on an unsaved Datatype (returns False). """ my_DT = Datatype(name="IncompleteDT", description="Non-finished Datatype", user=self.myUser) my_DT.full_clean() self.assertEquals(my_DT.is_complete(), False) def test_is_complete_incomplete(self): """ Tests is_complete() on a saved but incomplete Datatype (returns False). """ my_DT = Datatype(name="IncompleteDT", description="Non-finished Datatype", user=self.myUser) my_DT.full_clean() my_DT.save() self.assertEquals(my_DT.is_complete(), False) def test_is_complete_restricts_string(self): """ Tests is_complete() on a complete Datatype that restricts STR (returns True). """ my_DT = Datatype(name="IncompleteDT", description="Non-finished Datatype", user=self.myUser) my_DT.full_clean() my_DT.save() my_DT.restricts.add(self.STR) self.assertEquals(my_DT.is_complete(), True) def test_is_complete_restricts_others(self): """ Tests is_complete() on a complete Datatype that restricts other Datatypes (returns True). """ super_DT = Datatype(name="SuperDT", description="Supertype", user=self.myUser) super_DT.full_clean() super_DT.save() super_DT.restricts.add(self.STR) middle_DT = Datatype(name="MiddleDT", description="Middle type", user=self.myUser) middle_DT.full_clean() middle_DT.save() middle_DT.restricts.add(super_DT) my_DT = Datatype(name="SubDT", description="Subtype", user=self.myUser) my_DT.full_clean() my_DT.save() my_DT.restricts.add(middle_DT, self.INT) self.assertEquals(middle_DT.is_complete(), True) self.assertEquals(my_DT.is_complete(), True) self.assertEquals(my_DT.is_complete(), True) def test_complete_clean_unsaved_bad(self): """ Tests complete_clean() on an unsaved Datatype. 
""" my_DT = Datatype(name="IncompleteDT", description="Non-finished Datatype", user=self.myUser) my_DT.full_clean() self.assertRaisesRegexp(ValidationError, re.escape(('Datatype "{}" does not restrict any of the Shipyard atomic Datatypes' .format(my_DT))), my_DT.complete_clean) def test_complete_clean_incomplete(self): """ Tests complete_clean() on a saved but incomplete Datatype. """ my_DT = Datatype(name="IncompleteDT", description="Non-finished Datatype", user=self.myUser) my_DT.full_clean() my_DT.save() self.assertRaisesRegexp(ValidationError, re.escape(('Datatype "{}" does not restrict any of the Shipyard atomic Datatypes' .format(my_DT))), my_DT.complete_clean) def test_complete_clean_restricts_string(self): """ Tests complete_clean() on a complete Datatype that restricts STR. """ my_DT = Datatype(name="IncompleteDT", description="Non-finished Datatype", user=self.myUser) my_DT.full_clean() my_DT.save() my_DT.restricts.add(self.STR) self.assertEquals(my_DT.complete_clean(), None) def test_complete_clean_restricts_others(self): """ Tests complete_clean() on a complete Datatype that restricts other Datatypes (returns True). """ super_DT = Datatype(name="SuperDT", description="Supertype", user=self.myUser) super_DT.full_clean() super_DT.save() super_DT.restricts.add(self.STR) middle_DT = Datatype(name="MiddleDT", description="Middle type", user=self.myUser) middle_DT.full_clean() middle_DT.save() middle_DT.restricts.add(super_DT) my_DT = Datatype(name="SubDT", description="Subtype", user=self.myUser) my_DT.full_clean() my_DT.save() my_DT.restricts.add(middle_DT, self.STR) self.assertEquals(middle_DT.complete_clean(), None) self.assertEquals(my_DT.complete_clean(), None) self.assertEquals(my_DT.complete_clean(), None) # Quick check of propagation. def test_complete_clean_propagate_from_clean(self): """ Testing complete_clean() on a string Datatype with conflicting MIN|MAX_LENGTH defined on its supertypes. 
""" super_DT = Datatype(name="BoundedMinDT", description="String with a MIN_LENGTH", user=self.myUser) super_DT.full_clean() super_DT.save() super_DT.restricts.add(self.STR) super_DT.basic_constraints.create(ruletype=BasicConstraint.MIN_LENGTH, rule="44") second_DT = Datatype(name="BoundedMaxDT", description="String with a MAX_LENGTH", user=self.myUser) second_DT.full_clean() second_DT.save() second_DT.restricts.add(self.STR) second_DT.basic_constraints.create(ruletype=BasicConstraint.MAX_LENGTH, rule="22") constr_DT = Datatype(name="InheritingBadBoundsDT", description="Datatype inheriting conflicting MIN|MAX_LENGTH", user=self.myUser) constr_DT.full_clean() constr_DT.save() constr_DT.restricts.add(super_DT) constr_DT.restricts.add(second_DT) self.assertRaisesRegexp(ValidationError, re.escape(('Datatype "{}" has effective MIN_LENGTH {} exceeding its effective ' 'MAX_LENGTH {}').format(constr_DT, 44, 22)), constr_DT.complete_clean) class DatatypeGetBuiltinTypeTests(MetadataTestCase): """ Tests of the Datatype.get_builtin_type() function. """ def test_on_builtins(self): """ Testing on the built-in Shipyard types. """ self.assertEquals(self.STR.get_builtin_type(), self.STR) self.assertEquals(self.INT.get_builtin_type(), self.INT) self.assertEquals(self.FLOAT.get_builtin_type(), self.FLOAT) self.assertEquals(self.BOOL.get_builtin_type(), self.BOOL) ######## def __test_on_direct_builtin_descendant_h(self, builtin_type): """ Helper for testing on direct descendants on the builtins. """ my_DT = Datatype(name="DescendantDT", description="Descendant of builtin DT", user=self.myUser) my_DT.full_clean() my_DT.save() my_DT.restricts.add(builtin_type) self.assertEquals(my_DT.get_builtin_type(), builtin_type) def test_direct_descendant_int(self): """ Testing case where Datatype is a descendant of INT. """ self.__test_on_direct_builtin_descendant_h(self.INT) def test_direct_descendant_str(self): """ Testing case where Datatype is a descendant of STR. 
""" self.__test_on_direct_builtin_descendant_h(self.STR) def test_direct_descendant_float(self): """ Testing case where Datatype is a descendant of FLOAT. """ self.__test_on_direct_builtin_descendant_h(self.FLOAT) def test_direct_descendant_bool(self): """ Testing case where Datatype is a descendant of BOOL. """ self.__test_on_direct_builtin_descendant_h(self.BOOL) ######## def __test_supertype_precedence_h(self, builtin_types_to_restrict, most_restrictive_type): """ Helper for testing appropriate supertype precedence. """ my_DT = Datatype(name="InheritingDT", description="Datatype with several supertypes", user=self.myUser) my_DT.full_clean() my_DT.save() for to_restrict in builtin_types_to_restrict: my_DT.restricts.add(to_restrict) self.assertEquals(my_DT.get_builtin_type(), most_restrictive_type) def test_supertype_precedence_float_over_str(self): """ FLOAT should take precedence over STR. """ self.__test_supertype_precedence_h([self.STR, self.FLOAT], self.FLOAT) def test_supertype_precedence_int_over_str(self): """ INT should take precedence over STR. """ self.__test_supertype_precedence_h([self.STR, self.INT], self.INT) def test_supertype_precedence_bool_over_str(self): """ BOOL should take precedence over STR. """ self.__test_supertype_precedence_h([self.BOOL, self.STR], self.BOOL) def test_supertype_precedence_int_over_float(self): """ INT should take precedence over FLOAT. """ self.__test_supertype_precedence_h([self.INT, self.FLOAT], self.INT) def test_supertype_precedence_bool_over_float(self): """ BOOL should take precedence over FLOAT. """ self.__test_supertype_precedence_h([self.FLOAT, self.BOOL], self.BOOL) def test_supertype_precedence_bool_over_int(self): """ BOOL should take precedence over INT. """ self.__test_supertype_precedence_h([self.INT, self.BOOL], self.BOOL) def test_supertype_precedence_multiple(self): """ Testing precendence when there are several builtins restricted. 
""" self.__test_supertype_precedence_h([self.INT, self.BOOL, self.STR], self.BOOL) ######## def test_multiple_supertypes(self): """ Testing case where Datatype has multiple supertypes of varying generations. """ super_DT = Datatype(name="SuperDT", description="Super DT", user=self.myUser) super_DT.full_clean() super_DT.save() super_DT.restricts.add(self.FLOAT) super2_DT = Datatype(name="SuperDT2", description="Super DT 2", user=self.myUser) super2_DT.full_clean() super2_DT.save() super2_DT.restricts.add(self.STR) super3_DT = Datatype(name="SuperDT3", description="Super DT 3", user=self.myUser) super3_DT.full_clean() super3_DT.save() super3_DT.restricts.add(super_DT) my_DT = Datatype(name="DescendantDT", description="Descendant of several supertypes", user=self.myUser) my_DT.full_clean() my_DT.save() my_DT.restricts.add(super2_DT) my_DT.restricts.add(super3_DT) self.assertEquals(my_DT.get_builtin_type(), self.FLOAT) def test_multiple_supertypes_2(self): """ Another testing case where Datatype has multiple supertypes of varying generations. """ super_DT = Datatype(name="SuperDT", description="Super DT", user=self.myUser) super_DT.full_clean() super_DT.save() super_DT.restricts.add(self.FLOAT) super2_DT = Datatype(name="SuperDT2", description="Super DT 2", user=self.myUser) super2_DT.full_clean() super2_DT.save() super2_DT.restricts.add(self.BOOL) super3_DT = Datatype(name="SuperDT3", description="Super DT 3", user=self.myUser) super3_DT.full_clean() super3_DT.save() super3_DT.restricts.add(super_DT) my_DT = Datatype(name="DescendantDT", description="Descendant of several supertypes", user=self.myUser) my_DT.full_clean() my_DT.save() my_DT.restricts.add(super2_DT) my_DT.restricts.add(super3_DT) self.assertEquals(my_DT.get_builtin_type(), self.BOOL) class DatatypeCheckBasicConstraints(MetadataTestCase): """ Tests of Datatype.check_basic_constraints(). 
""" def __test_builtin_type_good_h(self, builtin_type, string_to_check): """ Helper for testing good cases where the input conforms to the appropriate built-in type. """ my_DT = Datatype(name="MyDT", description="Non-builtin datatype", user=self.myUser) my_DT.full_clean() my_DT.save() my_DT.restricts.add(builtin_type) # Check builtin type too. self.assertEquals(builtin_type.check_basic_constraints(string_to_check), []) self.assertEquals(my_DT.check_basic_constraints(string_to_check), []) my_DT.delete() def test_str_good(self): """ Testing case of a string with no constraints. """ self.__test_builtin_type_good_h(self.STR, "foo") def test_float_good(self): """ Testing case of a float with no constraints. """ self.__test_builtin_type_good_h(self.FLOAT, "3.14") def test_int_good(self): """ Testing case of an int with no constraints. """ self.__test_builtin_type_good_h(self.INT, "-8") def test_bool_good(self): """ Testing case of an int with no constraints. """ self.__test_builtin_type_good_h(self.BOOL, "True") self.__test_builtin_type_good_h(self.BOOL, "TRUE") self.__test_builtin_type_good_h(self.BOOL, "true") self.__test_builtin_type_good_h(self.BOOL, "T") self.__test_builtin_type_good_h(self.BOOL, "t") self.__test_builtin_type_good_h(self.BOOL, "1") self.__test_builtin_type_good_h(self.BOOL, "False") self.__test_builtin_type_good_h(self.BOOL, "FALSE") self.__test_builtin_type_good_h(self.BOOL, "false") self.__test_builtin_type_good_h(self.BOOL, "F") self.__test_builtin_type_good_h(self.BOOL, "f") self.__test_builtin_type_good_h(self.BOOL, "0") def __test_builtin_type_bad_h(self, builtin_type, string_to_check): """ Helper for testing cases where the input does not conform to the appropriate built-in type. 
""" my_DT = Datatype(name="MyDT", description="Non-builtin datatype", user=self.myUser) my_DT.full_clean() my_DT.save() my_DT.restricts.add(builtin_type) builtin_type_error = "" if builtin_type == self.FLOAT: builtin_type_error = "Was not float" elif builtin_type == self.INT: builtin_type_error = "Was not integer" elif builtin_type == self.BOOL: builtin_type_error = "Was not Boolean" # Check builtin type too. self.assertEquals(builtin_type.check_basic_constraints(string_to_check), [builtin_type_error]) self.assertEquals(my_DT.check_basic_constraints(string_to_check), [builtin_type_error]) def test_float_error(self): """ Testing case where string cannot be cast to a float. """ self.__test_builtin_type_bad_h(self.FLOAT, "foo") def test_int_error(self): """ Testing case where string cannot be cast to an int. """ self.__test_builtin_type_bad_h(self.INT, "1.72") def test_bool_error(self): """ Testing case where string cannot be cast to a Boolean. """ self.__test_builtin_type_bad_h(self.BOOL, "maybe") # Test that "Was not [builtin type]" overrules other constraints. def __test_builtin_type_with_constraint_bad_h(self, builtin_type, BC_type, constr_val, string_to_check): """ Helper for testing cases where the input does not conform to the appropriate built-in type. """ my_DT = Datatype(name="MyDT", description="Non-builtin datatype", user=self.myUser) my_DT.full_clean() my_DT.save() my_DT.restricts.add(builtin_type) my_DT.basic_constraints.create(ruletype=BC_type, rule="{}".format(constr_val)) builtin_type_error = "" if builtin_type == self.FLOAT: builtin_type_error = "Was not float" elif builtin_type == self.INT: builtin_type_error = "Was not integer" elif builtin_type == self.BOOL: builtin_type_error = "Was not Boolean" self.assertEquals(my_DT.check_basic_constraints(string_to_check), [builtin_type_error]) def test_float_error_with_constraint(self): """ Testing case where string cannot be cast to a float and the Datatype has a constraint. 
""" self.__test_builtin_type_with_constraint_bad_h(self.FLOAT, BasicConstraint.MIN_VAL, 8, "foo") def test_int_error_with_constraint(self): """ Testing case where string cannot be cast to an integer and the Datatype has a constraint. """ self.__test_builtin_type_with_constraint_bad_h(self.INT, BasicConstraint.MAX_VAL, 17, "1.2") def test_bool_error_with_constraint(self): """ Testing case where string cannot be cast to an integer and the Datatype has a constraint. """ self.__test_builtin_type_with_constraint_bad_h(self.BOOL, BasicConstraint.REGEXP, ".*", "what") ######## def __test_numerical_constraint_h(self, builtin_type, BC_type, constr_val, string_to_check, passes_constraint=True): """ Helper to test strings against numerical constraints. """ my_DT = Datatype(name="MyDT", description="Datatype with numerical BC", user=self.myUser) my_DT.full_clean() my_DT.save() my_DT.restricts.add(builtin_type) my_BC = my_DT.basic_constraints.create(ruletype=BC_type, rule="{}".format(constr_val)) if passes_constraint: self.assertEquals(my_DT.check_basic_constraints(string_to_check), []) else: self.assertEquals(my_DT.check_basic_constraints(string_to_check), [my_BC]) #### def test_min_length_pass(self): """ Testing case where a string passes the MIN_LENGTH restriction. """ self.__test_numerical_constraint_h(self.STR, BasicConstraint.MIN_LENGTH, 5, "foobar", passes_constraint=True) def test_min_length_edge_pass(self): """ Testing case where a string just passes (edge-condition) the MIN_LENGTH restriction. """ self.__test_numerical_constraint_h(self.STR, BasicConstraint.MIN_LENGTH, 11, "hello world", passes_constraint=True) def test_min_length_fail(self): """ Testing case where a string fails the MIN_LENGTH restriction. """ self.__test_numerical_constraint_h(self.STR, BasicConstraint.MIN_LENGTH, 100, "short string", passes_constraint=False) def test_min_length_edge_fail(self): """ Testing case where a string just fails (edge-condition) the MIN_LENGTH restriction. 
""" self.__test_numerical_constraint_h(self.STR, BasicConstraint.MIN_LENGTH, 8, "bye all", passes_constraint=False) #### def test_max_length_pass(self): """ Testing case where a string passes the MAX_LENGTH restriction. """ self.__test_numerical_constraint_h(self.STR, BasicConstraint.MAX_LENGTH, 2, "Hi", passes_constraint=True) def test_max_length_edge_pass(self): """ Testing case where a string just passes (edge-condition) the MAX_LENGTH restriction. """ self.__test_numerical_constraint_h(self.STR, BasicConstraint.MAX_LENGTH, 27, "onetwothreefourfive and six", passes_constraint=True) def test_max_length_fail(self): """ Testing case where a string fails the MAX_LENGTH restriction. """ self.__test_numerical_constraint_h(self.STR, BasicConstraint.MAX_LENGTH, 10, "Hello everyone", passes_constraint=False) def test_max_length_edge_fail(self): """ Testing case where a string just fails (edge-condition) the MAX_LENGTH restriction. """ self.__test_numerical_constraint_h(self.STR, BasicConstraint.MAX_LENGTH, 10, "Hello world", passes_constraint=False) #### def test_min_val_float_pass(self): """ Testing case where a float passes the MIN_VAL restriction. """ self.__test_numerical_constraint_h(self.FLOAT, BasicConstraint.MIN_VAL, 17, "100", passes_constraint=True) def test_min_val_float_edge_pass(self): """ Testing case where a float just passes (edge-condition) the MIN_VAL restriction. """ self.__test_numerical_constraint_h(self.FLOAT, BasicConstraint.MIN_VAL, -1722.4, "-1722.4", passes_constraint=True) def test_min_val_float_fail(self): """ Testing case where a float fails the MIN_VAL restriction. """ self.__test_numerical_constraint_h(self.FLOAT, BasicConstraint.MIN_VAL, 17, "14", passes_constraint=False) # Note that there isn't an "edge fail" case here. #### def test_max_val_float_pass(self): """ Testing case where a float passes the MAX_VAL restriction. 
""" self.__test_numerical_constraint_h(self.FLOAT, BasicConstraint.MAX_VAL, -100090, "-111117.445", passes_constraint=True) def test_max_val_float_edge_pass(self): """ Testing case where a float just passes (edge-condition) the MIN_VAL restriction. """ self.__test_numerical_constraint_h(self.FLOAT, BasicConstraint.MAX_VAL, 42.77, "42.77", passes_constraint=True) def test_max_val_float_fail(self): """ Testing case where a float fails the MAX_VAL restriction. """ self.__test_numerical_constraint_h(self.FLOAT, BasicConstraint.MAX_VAL, -17, "-1", passes_constraint=False) # As above there is no "edge fail" here. #### def test_min_val_int_pass(self): """ Testing case where an integer passes the MIN_VAL restriction. """ self.__test_numerical_constraint_h(self.INT, BasicConstraint.MIN_VAL, -4, "6", passes_constraint=True) def test_min_val_int_edge_pass(self): """ Testing case where an integer just passes (edge-condition) the MIN_VAL restriction. """ self.__test_numerical_constraint_h(self.INT, BasicConstraint.MIN_VAL, 165, "165", passes_constraint=True) def test_min_val_int_fail(self): """ Testing case where an integer fails the MIN_VAL restriction. """ self.__test_numerical_constraint_h(self.FLOAT, BasicConstraint.MIN_VAL, 3, "-2", passes_constraint=False) def test_min_val_int_edge_fail(self): """ Testing case where an integer just fails (edge-condition) the MIN_VAL restriction. """ self.__test_numerical_constraint_h(self.FLOAT, BasicConstraint.MIN_VAL, 7, "6", passes_constraint=False) #### def test_max_val_int_pass(self): """ Testing case where an integer passes the MAX_VAL restriction. """ self.__test_numerical_constraint_h(self.INT, BasicConstraint.MAX_VAL, 85, "3", passes_constraint=True) def test_max_val_int_edge_pass(self): """ Testing case where an integer just passes (edge-condition) the MAX_VAL restriction. 
""" self.__test_numerical_constraint_h(self.INT, BasicConstraint.MAX_VAL, -92, "-92", passes_constraint=True) def test_max_val_int_fail(self): """ Testing case where an integer fails the MAX_VAL restriction. """ self.__test_numerical_constraint_h(self.FLOAT, BasicConstraint.MAX_VAL, 3, "44", passes_constraint=False) def test_max_val_int_edge_fail(self): """ Testing case where an integer just fails (edge-condition) the MAX_VAL restriction. """ self.__test_numerical_constraint_h(self.FLOAT, BasicConstraint.MAX_VAL, 7, "8", passes_constraint=False) #### def __test_regexp_h(self, builtin_type, constr_val, string_to_check, passes_constraint=True): """ Helper to test strings against a REGEXP constraints. """ my_DT = Datatype(name="MyDT", description="Datatype with REGEXP BC", user=self.myUser) my_DT.full_clean() my_DT.save() my_DT.restricts.add(builtin_type) my_BC = my_DT.basic_constraints.create(ruletype=BasicConstraint.REGEXP, rule="{}".format(constr_val)) if passes_constraint: self.assertEquals(my_DT.check_basic_constraints(string_to_check), []) else: self.assertEquals(my_DT.check_basic_constraints(string_to_check), [my_BC]) def test_str_regexp_pass(self): """ Test a string against a REGEXP it satisfies. """ self.__test_regexp_h(self.STR, "[a-z]+", "123abc", passes_constraint=True) def test_str_regexp_fail(self): """ Test a string against a REGEXP it does not satisfy. """ self.__test_regexp_h(self.STR, "foo|bar", "123abc", passes_constraint=False) def test_float_regexp_pass(self): """ Test a float against a REGEXP it satisfies. """ self.__test_regexp_h(self.FLOAT, "[1-9]+\.663", "1325.663", passes_constraint=True) def test_float_regexp_fail(self): """ Test a float against a REGEXP it doesn't satisfy. """ self.__test_regexp_h(self.FLOAT, "1065[0-9]+", "132544", passes_constraint=False) def test_int_regexp_pass(self): """ Test an int against a REGEXP it satisfies. 
""" self.__test_regexp_h(self.INT, ".+", "4444", passes_constraint=True) def test_int_regexp_fail(self): """ Test an int against a REGEXP it doesn't satisfy. """ self.__test_regexp_h(self.INT, "[1-9]{4}", "-1000", passes_constraint=False) def test_bool_regexp_pass(self): """ Test a Boolean against a REGEXP it satisfies. """ self.__test_regexp_h(self.BOOL, "True|TRUE|true|t|1", "True", passes_constraint=True) def test_bool_regexp_fail(self): """ Test a Boolean against a REGEXP it doesn't satisfy. """ self.__test_regexp_h(self.STR, "False", "True", passes_constraint=False) #### # Some test cases with combined restrictions. def test_str_multiple_restrictions_pass(self): """ Test a string against several restrictions. """ my_DT = Datatype(name="MyDT", description="Datatype with several restrictions", user=self.myUser) my_DT.full_clean() my_DT.save() my_DT.restricts.add(self.STR) my_DT.basic_constraints.create(ruletype=BasicConstraint.MIN_LENGTH, rule="4") my_DT.basic_constraints.create(ruletype=BasicConstraint.MAX_LENGTH, rule="7") my_DT.basic_constraints.create(ruletype=BasicConstraint.REGEXP, rule="foo...") my_DT.basic_constraints.create(ruletype=BasicConstraint.REGEXP, rule="...bar") self.assertEquals(my_DT.check_basic_constraints("foobar"), []) def test_str_multiple_restrictions_fail(self): """ Test a string against several restrictions, some of which fail. 
""" my_DT = Datatype(name="MyDT", description="Datatype with several restrictions", user=self.myUser) my_DT.full_clean() my_DT.save() my_DT.restricts.add(self.STR) my_DT.basic_constraints.create(ruletype=BasicConstraint.MIN_LENGTH, rule="4") my_max_length = my_DT.basic_constraints.create(ruletype=BasicConstraint.MAX_LENGTH, rule="5") my_DT.basic_constraints.create(ruletype=BasicConstraint.REGEXP, rule="foo...") my_regexp_2 = my_DT.basic_constraints.create(ruletype=BasicConstraint.REGEXP, rule="...baz") constr_fail = my_DT.check_basic_constraints("foobar") self.assertEquals(len(constr_fail), 2) self.assertEquals(my_max_length in constr_fail, True) self.assertEquals(my_regexp_2 in constr_fail, True) def test_float_multiple_restrictions_pass(self): """ Test a float against several restrictions, all of which pass. """ my_DT = Datatype(name="MyDT", description="Datatype with several restrictions", user=self.myUser) my_DT.full_clean() my_DT.save() my_DT.restricts.add(self.FLOAT) my_DT.basic_constraints.create(ruletype=BasicConstraint.MIN_VAL, rule="1999") my_DT.basic_constraints.create(ruletype=BasicConstraint.REGEXP, rule="^....$") my_DT.basic_constraints.create(ruletype=BasicConstraint.REGEXP, rule="..14") self.assertEquals(my_DT.check_basic_constraints("2014"), []) def test_float_multiple_restrictions_fail(self): """ Test a float against several restrictions, some of which fail. """ my_DT = Datatype(name="MyDT", description="Datatype with several restrictions", user=self.myUser) my_DT.full_clean() my_DT.save() my_DT.restricts.add(self.FLOAT) my_min_val = my_DT.basic_constraints.create(ruletype=BasicConstraint.MAX_VAL, rule="1999") my_DT.basic_constraints.create(ruletype=BasicConstraint.REGEXP, rule="^....$") my_DT.basic_constraints.create(ruletype=BasicConstraint.REGEXP, rule="..14") self.assertEquals(my_DT.check_basic_constraints("2014"), [my_min_val]) def test_int_multiple_restrictions_pass(self): """ Test an int against several restrictions, all of which pass. 
""" my_DT = Datatype(name="MyDT", description="Datatype with several restrictions", user=self.myUser) my_DT.full_clean() my_DT.save() my_DT.restricts.add(self.INT) my_DT.basic_constraints.create(ruletype=BasicConstraint.MAX_VAL, rule="2099") my_DT.basic_constraints.create(ruletype=BasicConstraint.REGEXP, rule="^....$") my_DT.basic_constraints.create(ruletype=BasicConstraint.REGEXP, rule="..35") self.assertEquals(my_DT.check_basic_constraints("2035"), []) def test_int_multiple_restrictions_fail(self): """ Test an int against several restrictions, some of which fail. """ my_DT = Datatype(name="MyDT", description="Datatype with several restrictions", user=self.myUser) my_DT.full_clean() my_DT.save() my_DT.restricts.add(self.INT) my_min_val = my_DT.basic_constraints.create(ruletype=BasicConstraint.MIN_VAL, rule="2099") my_regexp = my_DT.basic_constraints.create(ruletype=BasicConstraint.REGEXP, rule="^....$") my_DT.basic_constraints.create(ruletype=BasicConstraint.REGEXP, rule="35") constr_fail = my_DT.check_basic_constraints("935") self.assertEquals(len(constr_fail), 2) self.assertEquals(my_regexp in constr_fail, True) self.assertEquals(my_min_val in constr_fail, True) def test_bool_multiple_restrictions_pass(self): """ Test a Boolean against several restrictions, all of which pass. """ my_DT = Datatype(name="MyDT", description="Datatype with several restrictions", user=self.myUser) my_DT.full_clean() my_DT.save() my_DT.restricts.add(self.BOOL) my_DT.basic_constraints.create(ruletype=BasicConstraint.REGEXP, rule="T...") my_DT.basic_constraints.create(ruletype=BasicConstraint.REGEXP, rule="rue|RUE") self.assertEquals(my_DT.check_basic_constraints("True"), []) def test_bool_multiple_restrictions_fail(self): """ Test a Boolean against several restrictions, some of which fail. 
""" my_DT = Datatype(name="MyDT", description="Datatype with several restrictions", user=self.myUser) my_DT.full_clean() my_DT.save() my_DT.restricts.add(self.BOOL) my_regexp = my_DT.basic_constraints.create(ruletype=BasicConstraint.REGEXP, rule="T...") my_regexp_2 = my_DT.basic_constraints.create(ruletype=BasicConstraint.REGEXP, rule="rue|RUE") constr_fail = my_DT.check_basic_constraints("False") self.assertEquals(len(constr_fail), 2) self.assertEquals(my_regexp in constr_fail, True) self.assertEquals(my_regexp_2 in constr_fail, True) #### # A couple of test cases for inherited constraints. # FIXME we need to think on this further! def test_str_inherit_restrictions(self): """ Testing a string against some inherited restrictions. """ super_DT = Datatype(name="SuperDT", description="Supertype", user=self.myUser) super_DT.full_clean() super_DT.save() super_DT.restricts.add(self.STR) my_regexp = super_DT.basic_constraints.create(ruletype=BasicConstraint.REGEXP, rule="Hello t....") my_DT = Datatype(name="MyDT", description="Datatype inheriting a restriction", user=self.myUser) my_DT.full_clean() my_DT.save() my_DT.restricts.add(super_DT) my_max_length = my_DT.basic_constraints.create(ruletype=BasicConstraint.MAX_LENGTH, rule="12") self.assertEquals(my_DT.check_basic_constraints("Hello there"), []) self.assertEquals(my_DT.check_basic_constraints("Hello theremin"), [my_max_length]) self.assertEquals(my_DT.check_basic_constraints("Hello"), [my_regexp]) constr_fail = my_DT.check_basic_constraints("Goodbye everyone") self.assertEquals(len(constr_fail), 2) self.assertEquals(my_regexp in constr_fail, True) self.assertEquals(my_max_length in constr_fail, True) def test_float_inherit_restrictions(self): """ Testing a float against some inherited restrictions. 
""" super_DT = Datatype(name="SuperDT", description="Supertype", user=self.myUser) super_DT.full_clean() super_DT.save() super_DT.restricts.add(self.STR) super_DT.basic_constraints.create(ruletype=BasicConstraint.MIN_LENGTH, rule="2") my_DT = Datatype(name="MyDT", description="Datatype inheriting a restriction", user=self.myUser) my_DT.full_clean() my_DT.save() my_DT.restricts.add(super_DT) my_DT.restricts.add(self.FLOAT) my_max_val = my_DT.basic_constraints.create(ruletype=BasicConstraint.MAX_VAL, rule="95") self.assertEquals(my_DT.check_basic_constraints("82"), []) self.assertEquals(my_DT.check_basic_constraints(".7"), []) self.assertEquals(my_DT.check_basic_constraints("99"), [my_max_val]) # Note that since my_DT is no longer a STR, only my_max_val applies. self.assertEquals(my_DT.check_basic_constraints("114"), [my_max_val]) def test_int_inherit_restrictions(self): """ Testing an integer against some inherited restrictions. """ super_DT = Datatype(name="SuperDT", description="Supertype", user=self.myUser) super_DT.full_clean() super_DT.save() super_DT.restricts.add(self.STR) my_regexp = super_DT.basic_constraints.create(ruletype=BasicConstraint.REGEXP, rule="1000...") super2_DT = Datatype(name="SuperDT2", description="Supertype 2", user=self.myUser) super2_DT.full_clean() super2_DT.save() super2_DT.restricts.add(self.INT) my_min_val = super2_DT.basic_constraints.create(ruletype=BasicConstraint.MIN_VAL, rule="1000100") my_DT = Datatype(name="MyDT", description="Datatype inheriting restrictions", user=self.myUser) my_DT.full_clean() my_DT.save() my_DT.restricts.add(super_DT) my_DT.restricts.add(super2_DT) self.assertEquals(my_DT.check_basic_constraints("1000107"), []) self.assertEquals(my_DT.check_basic_constraints("1000004"), [my_min_val]) self.assertEquals(my_DT.check_basic_constraints("1099999"), [my_regexp]) constr_fail = my_DT.check_basic_constraints("99999") self.assertEquals(len(constr_fail), 2) self.assertEquals(my_regexp in constr_fail, True) 
self.assertEquals(my_min_val in constr_fail, True) def test_int_inherit_overridden_restriction(self): """ Testing an integer against an overridden inherited restriction. """ super_DT = Datatype(name="SuperDT", description="Supertype", user=self.myUser) super_DT.full_clean() super_DT.save() super_DT.restricts.add(self.INT) super_DT.basic_constraints.create(ruletype=BasicConstraint.MAX_VAL, rule="999") my_DT = Datatype(name="MyDT", description="Datatype inheriting restrictions", user=self.myUser) my_DT.full_clean() my_DT.save() my_DT.restricts.add(super_DT) my_max_val = my_DT.basic_constraints.create(ruletype=BasicConstraint.MAX_VAL, rule="899") self.assertEquals(my_DT.check_basic_constraints("0"), []) self.assertEquals(my_DT.check_basic_constraints("950"), [my_max_val]) # super_max_val is overridden so only my_max_val should fail. self.assertEquals(my_DT.check_basic_constraints("1055"), [my_max_val]) def test_bool_inherit_restrictions(self): """ Testing a Boolean against some inherited restrictions. 
""" super_DT = Datatype(name="SuperDT", description="Supertype", user=self.myUser) super_DT.full_clean() super_DT.save() super_DT.restricts.add(self.BOOL) my_regexp = super_DT.basic_constraints.create(ruletype=BasicConstraint.REGEXP, rule="T.+") my_regexp2 = super_DT.basic_constraints.create(ruletype=BasicConstraint.REGEXP, rule=".rue") my_DT = Datatype(name="MyDT", description="Datatype inheriting restrictions", user=self.myUser) my_DT.full_clean() my_DT.save() my_DT.restricts.add(super_DT) my_DT.restricts.add(self.BOOL) self.assertEquals(my_DT.check_basic_constraints("True"), []) self.assertEquals(my_DT.check_basic_constraints("true"), [my_regexp]) self.assertEquals(my_DT.check_basic_constraints("TRUE"), [my_regexp2]) self.assertEquals(set(my_DT.check_basic_constraints("False")), set([my_regexp, my_regexp2])) class CompoundDatatypeMemberTests(MetadataTestCase): def test_cdtMember_unicode(self): """ Unicode of compoundDatatypeMember should return (column index, datatype name, column name) """ self.assertEqual( unicode(self.test_cdt.members.get(column_idx=1)), "label: string" ) self.assertEqual( unicode(self.test_cdt.members.get(column_idx=2)), "PBMCseq: DNANucSeq" ) self.assertEqual( unicode(self.test_cdt.members.get(column_idx=3)), "PLAseq: RNANucSeq" ) class CompoundDatatypeTests(MetadataTestCase): def test_cdt_zero_member_unicode(self): """ Unicode of empty CompoundDatatype should be empty. """ empty_cdt = CompoundDatatype(user=self.myUser) empty_cdt.save() self.assertEqual(unicode(empty_cdt), "[empty CompoundDatatype]") def test_cdt_single_member_unicode(self): """ Unicode on single-member cdt returns its member. """ self.assertEqual(unicode(self.DNAinput_cdt), "(SeqToComplement: DNANucSeq)") def test_cdt_multiple_members_unicode(self): """ Unicode returns a list of its Datatype members. Each member is in the form of unicode(CompoundDatatypeMember). 
""" self.assertEqual( unicode(self.test_cdt), "(label: string, PBMCseq: DNANucSeq, PLAseq: RNANucSeq)") def test_cdt_four_members_short_name(self): self.basic_cdt.members.get(column_idx=5).delete() self.assertEqual( self.basic_cdt.short_name, "(label: string, integer: integer, float: float, bool: boolean)") def test_cdt_five_members_short_name(self): self.assertEqual( self.basic_cdt.short_name, "(label: string, integer: integer, float: float, plus 2 others)") def test_clean_single_index_good(self): """ CompoundDatatype with single index equalling 1. """ sad_cdt = CompoundDatatype(user=self.myUser) sad_cdt.save() sad_cdt.members.create(datatype=self.RNA_dt, column_name="ColumnTwo", column_idx=1) self.assertEqual(sad_cdt.clean(), None) def test_clean_single_index_bad(self): """ CompoundDatatype with single index not equalling 1. """ sad_cdt = CompoundDatatype(user=self.myUser) sad_cdt.save() sad_cdt.members.create(datatype=self.RNA_dt, column_name="ColumnTwo", column_idx=3) self.assertRaisesRegexp( ValidationError, re.escape(('Column indices of CompoundDatatype "{}" are not ' 'consecutive starting from 1'.format(sad_cdt))), sad_cdt.clean) def test_clean_consecutive_member_indices_correct(self): """ A CompoundDatatype with consecutive member indices passes clean. """ self.assertEqual(self.test_cdt.clean(), None) good_cdt = CompoundDatatype(user=self.myUser) good_cdt.save() good_cdt.members.create(datatype=self.RNA_dt, column_name="ColumnTwo", column_idx=2) good_cdt.members.create(datatype=self.DNA_dt, column_name="ColumnOne", column_idx=1) self.assertEqual(good_cdt.clean(), None) def test_clean_catches_consecutive_member_indices(self): """ A CompoundDatatype without consecutive member indices throws a ValidationError. 
""" bad_cdt = CompoundDatatype(user=self.myUser) bad_cdt.save() bad_cdt.members.create(datatype=self.RNA_dt, column_name="ColumnOne", column_idx=3) bad_cdt.members.create(datatype=self.DNA_dt, column_name="ColumnTwo", column_idx=1) self.assertRaisesRegexp( ValidationError, re.escape(('Column indices of CompoundDatatype "{}" are not ' 'consecutive starting from 1'.format(bad_cdt))), bad_cdt.clean) def test_clean_members_no_column_names(self): """ Datatype members must have column names. """ cdt = CompoundDatatype(user=self.myUser) cdt.save() cdt.members.create(datatype=self.RNA_dt, column_idx=1) self.assertRaisesRegexp(ValidationError, "{'column_name': \[u'This field cannot be blank.'\]}", cdt.clean) def test_copy_users_allowed(self): cdt_without_permissions = CompoundDatatype.objects.filter( users_allowed__isnull=True).first() cdt_with_permissions = CompoundDatatype.objects.filter( users_allowed__isnull=False).first() self.assertIsNotNone(cdt_without_permissions) self.assertIsNotNone(cdt_with_permissions) expected_permissions = set( cdt_with_permissions.users_allowed.values_list('username')) cdt_without_permissions.copy_permissions(cdt_with_permissions) permissions = set( cdt_without_permissions.users_allowed.values_list('username')) self.assertEqual(expected_permissions, permissions) def test_copy_groups_allowed(self): cdt_without_permissions = CompoundDatatype.objects.filter( groups_allowed__isnull=True).first() cdt_with_permissions = CompoundDatatype.objects.filter( groups_allowed__isnull=False).first() self.assertIsNotNone(cdt_without_permissions) self.assertIsNotNone(cdt_with_permissions) expected_permissions = set( cdt_with_permissions.groups_allowed.values_list('name')) cdt_without_permissions.copy_permissions(cdt_with_permissions) permissions = set( cdt_without_permissions.groups_allowed.values_list('name')) self.assertEqual(expected_permissions, permissions) def test_create_dataset_raw(self): """ Creating a raw Dataset should pass clean. 
""" path = os.path.join(samplecode_path, "doublet_cdt.csv") raw_dataset = Dataset.create_dataset(file_path=path, user=self.myUser, keep_file=True, name="something", description="desc") self.assertEqual(raw_dataset.clean(), None) def test_create_dataset_valid(self): """ Creating a Dataset with a CDT, where the file conforms, should be OK. """ path = os.path.join(samplecode_path, "doublet_cdt.csv") doublet_dataset = Dataset.create_dataset(file_path=path, user=self.myUser, cdt=self.doublet_cdt, keep_file=True, name="something", description="desc") self.assertEqual(doublet_dataset.clean(), None) self.assertEqual(doublet_dataset.structure.clean(), None) def test_create_dataset_bad_num_cols(self): """Define a dataset, but with the wrong number of headers.""" path = os.path.join(samplecode_path, "step_0_triplet_3_rows.csv") self.assertRaisesRegexp( ValueError, re.escape('The header of file "{}" does not match the CompoundDatatype "{}"' .format(path, self.doublet_cdt)), lambda: Dataset.create_dataset(file_path=path, user=self.myUser, cdt=self.doublet_cdt, name="DS1", description="DS1 desc")) def test_create_dataset_bad_col_names(self): """Define a dataset with the right number of header columns, but the wrong column names.""" path = os.path.join(samplecode_path, "three_random_columns.csv") self.assertRaisesRegexp( ValueError, re.escape('The header of file "{}" does not match the CompoundDatatype "{}"' .format(path, self.triplet_cdt)), lambda: Dataset.create_dataset(file_path=path, user=self.myUser, cdt=self.triplet_cdt, name="DS1", description="DS1 desc")) def test_type_constraints_row(self): # The cdt schema is (string, int, float, bool, rna) t1 = self.basic_cdt.check_constraints(['Once', 'upon', 'a', 'time', 'there']) t2 = self.basic_cdt.check_constraints(['was', '1', 'young', 'lazy', 'dev']) t3 = self.basic_cdt.check_constraints(['that', 'needed', '2', 'test', 'his']) t4 = self.basic_cdt.check_constraints(['datatype', 'as', 'a', 'True', 'which']) t5 = 
self.basic_cdt.check_constraints(['often', 'made', 'him', 'scream', 'UGGGG']) int_fail = u'Was not integer' float_fail = u'Was not float' bool_fail = u'Was not Boolean' rna_fail = u"Failed check 'regexp=^[ACGUacgu]*$'" self.assertEqual(t1, [[], [int_fail], [float_fail], [bool_fail], [rna_fail]]) self.assertEqual(t2, [[], [], [float_fail], [bool_fail], [rna_fail]]) self.assertEqual(t3, [[], [int_fail], [], [bool_fail], [rna_fail]]) self.assertEqual(t4, [[], [int_fail], [float_fail], [], [rna_fail]]) self.assertEqual(t5, [[], [int_fail], [float_fail], [bool_fail], []]) @skipIfDBFeature('is_mocked') class DatatypeApiTests(TestCase): def setUp(self): self.factory = APIRequestFactory() self.kive_user = kive_user() self.list_path = reverse("datatype-list") self.detail_pk = 7 self.detail_path = reverse("datatype-detail", kwargs={'pk': self.detail_pk}) self.removal_path = reverse("datatype-removal-plan", kwargs={'pk': self.detail_pk}) # This should equal metadata.ajax.CompoundDatatypeViewSet.as_view({"get": "list"}). self.list_view, _, _ = resolve(self.list_path) self.detail_view, _, _ = resolve(self.detail_path) self.removal_view, _, _ = resolve(self.removal_path) def test_auth(self): # First try to access while not logged in. request = self.factory.get(self.list_path) response = self.list_view(request) self.assertEquals(response.data["detail"], "Authentication credentials were not provided.") # Now log in and check that "detail" is not passed in the response. force_authenticate(request, user=self.kive_user) response = self.list_view(request) self.assertNotIn('detail', response.data) def test_list(self): """ Test the CompoundDatatype API list view. """ request = self.factory.get(self.list_path) force_authenticate(request, user=self.kive_user) response = self.list_view(request, pk=None) # There are four CDTs loaded into the Database by default. 
self.assertEquals(len(response.data), 7) self.assertEquals(response.data[0]['id'], 1) self.assertEquals(response.data[2]['name'], 'float') def test_detail(self): request = self.factory.get(self.detail_path) force_authenticate(request, user=self.kive_user) response = self.detail_view(request, pk=self.detail_pk) self.assertEquals(response.data['name'], 'nucleotide sequence') def test_removal_plan(self): request = self.factory.get(self.removal_path) force_authenticate(request, user=self.kive_user) response = self.removal_view(request, pk=self.detail_pk) self.assertEquals(response.data['Datatypes'], 1) def test_removal(self): start_count = Datatype.objects.all().count() request = self.factory.delete(self.detail_path) force_authenticate(request, user=self.kive_user) response = self.detail_view(request, pk=self.detail_pk) self.assertEquals(response.status_code, status.HTTP_204_NO_CONTENT) end_count = Datatype.objects.all().count() self.assertEquals(end_count, start_count - 1) @skipIfDBFeature('is_mocked') class CompoundDatatypeApiTests(TestCase): def setUp(self): self.factory = APIRequestFactory() self.kive_user = kive_user() self.list_path = reverse("compounddatatype-list") self.detail_pk = 3 self.detail_path = reverse("compounddatatype-detail", kwargs={'pk': self.detail_pk}) self.removal_path = reverse("compounddatatype-removal-plan", kwargs={'pk': self.detail_pk}) # We can't remove the CDT with PK = 3 as it's used by Kive's internals. self.removal_pk = 4 # This should equal metadata.ajax.CompoundDatatypeViewSet.as_view({"get": "list"}). self.list_view, _, _ = resolve(self.list_path) self.detail_view, _, _ = resolve(self.detail_path) self.removal_view, _, _ = resolve(self.removal_path) def test_auth(self): # First try to access while not logged in. 
request = self.factory.get(self.list_path) response = self.list_view(request) self.assertEquals(response.data["detail"], "Authentication credentials were not provided.") # Now log in and check that "detail" is not passed in the response. force_authenticate(request, user=self.kive_user) response = self.list_view(request) self.assertNotIn('detail', response.data) def test_list(self): """ Test the CompoundDatatype API list view. """ request = self.factory.get(self.list_path) force_authenticate(request, user=self.kive_user) response = self.list_view(request, pk=None) # There are four CDTs loaded into the Database by default, and they're # sorted alphabetically. See the migration where they're defined for # details. SQLite sorts case-sensitive and PostgreSQL doesn't. self.assertEquals(len(response.data), 4) cdt2 = next(cdt for cdt in response.data if cdt['id'] == 2) self.assertEquals(cdt2['representation'], '(failed_row: natural number)') def test_detail(self): request = self.factory.get(self.detail_path) force_authenticate(request, user=self.kive_user) response = self.detail_view(request, pk=self.detail_pk) self.assertEquals(response.data['representation'], '(example: string?, valid: boolean)') def test_removal_plan(self): request = self.factory.get(self.removal_path) force_authenticate(request, user=self.kive_user) response = self.removal_view(request, pk=self.removal_pk) self.assertEquals(response.data['CompoundDatatypes'], 1) def test_removal(self): start_count = CompoundDatatype.objects.all().count() request = self.factory.delete(self.detail_path) force_authenticate(request, user=self.kive_user) response = self.detail_view(request, pk=self.removal_pk) self.assertEquals(response.status_code, status.HTTP_204_NO_CONTENT) end_count = CompoundDatatype.objects.all().count() self.assertEquals(end_count, start_count - 1) def test_removal_plan_protected_CDT(self): """ Removal plan will not attempt to remove a "protected" CDT (one that's used by Kive's internals). 
""" request = self.factory.get(self.removal_path) force_authenticate(request, user=self.kive_user) response = self.removal_view(request, pk=self.detail_pk) self.assertEquals(response.data['CompoundDatatypes'], 0) def test_create(self): """ Test creation of a CompoundDatatype using the API. """ self.string_dt = Datatype.objects.get(pk=datatypes.STR_PK) cdt_dict = { "name": "GoodCDT", "members": [ { "column_idx": 1, "column_name": "col1", "datatype": self.string_dt.pk }, { "column_idx": 2, "column_name": "col2", "datatype": self.string_dt.pk } ], "groups_allowed": [everyone_group().name] } request = self.factory.post(self.list_path, cdt_dict, format="json") force_authenticate(request, user=self.kive_user) self.list_view(request) # Probe the resulting method. cdt = CompoundDatatype.objects.get(name=cdt_dict["name"]) self.assertEqual(cdt.name, cdt_dict["name"]) self.assertEqual(cdt.user, kive_user()) self.assertSetEqual(set(cdt.groups_allowed.all()), {everyone_group()}) self.assertFalse(cdt.users_allowed.exists()) self.assertEqual(cdt.members.count(), 2) col1 = cdt.members.get(column_idx=1) self.assertEqual(col1.column_name, cdt_dict["members"][0]["column_name"]) self.assertEqual(col1.datatype, self.string_dt) col2 = cdt.members.get(column_idx=2) self.assertEqual(col2.column_name, cdt_dict["members"][1]["column_name"]) self.assertEqual(col2.datatype, self.string_dt) @skipIfDBFeature('is_mocked') class AccessControlTests(TestCase): """ Tests of functionality of the AccessControl abstract class. 
""" def setUp(self): self.dt_owner = User.objects.create_user( "Noonian", "nsingh@compuserve.com", "feeeeeeelings" ) self.dt_owner.save() self.dt_owner.groups.add(everyone_group()) self.lore = User.objects.create_user( "Lore", "cto@borg.net", "Asimov's Three Laws" ) self.lore.save() self.lore.groups.add(everyone_group()) self.developers_group = Group.objects.get(pk=groups.DEVELOPERS_PK) self.bool_dt = Datatype.objects.get(pk=datatypes.BOOL_PK) self.ac_dt = Datatype(user=self.dt_owner, name="True", description="Python True") self.ac_dt.save() self.ac_dt.restricts.add(self.bool_dt) self.ac_dt.basic_constraints.create( ruletype=BasicConstraint.REGEXP, rule="True" ) self.users_to_intersect = User.objects.filter(pk__in=[self.dt_owner.pk, self.lore.pk]) self.groups_to_intersect = Group.objects.filter(pk__in=[self.developers_group.pk, everyone_group().pk]) def test_intersect_permissions_no_querysets_no_perms(self): """ Test of intersect_permissions when no querysets are specified and no permissions are given. """ users_qs, groups_qs = self.ac_dt.intersect_permissions() self.assertSetEqual({self.dt_owner}, set(users_qs)) self.assertFalse(groups_qs.exists()) def test_intersect_permissions_no_querysets_with_perms(self): """ Test of intersect_permissions when no querysets are specified and some permissions are given. """ self.ac_dt.users_allowed.add(self.lore) self.ac_dt.groups_allowed.add(self.developers_group) users_qs, groups_qs = self.ac_dt.intersect_permissions() self.assertSetEqual({self.dt_owner, self.lore}, set(users_qs)) self.assertSetEqual({self.developers_group}, set(groups_qs)) def test_intersect_permissions_no_querysets_everyone_perm(self): """ Test of intersect_permissions when no querysets are specified and the Everyone group has access. 
""" self.ac_dt.users_allowed.add(self.lore) self.ac_dt.groups_allowed.add(everyone_group()) users_qs, groups_qs = self.ac_dt.intersect_permissions() self.assertSetEqual(set(User.objects.all()), set(users_qs)) self.assertSetEqual(set(Group.objects.all()), set(groups_qs)) def test_intersect_permissions_querysets_specified_no_perms(self): """ Test of intersect_permissions when querysets are specified and no permissions are given. """ users_qs, groups_qs = self.ac_dt.intersect_permissions(users_qs=self.users_to_intersect, groups_qs=self.groups_to_intersect) self.assertSetEqual({self.dt_owner}, set(users_qs)) self.assertFalse(groups_qs.exists()) def test_intersect_permissions_querysets_with_perms(self): """ Test of intersect_permissions when querysets are specified and some permissions are given. """ self.ac_dt.users_allowed.add(self.dt_owner) self.ac_dt.groups_allowed.add(self.developers_group) users_qs, groups_qs = self.ac_dt.intersect_permissions(users_qs=self.users_to_intersect, groups_qs=self.groups_to_intersect) self.assertSetEqual({self.dt_owner}, set(users_qs)) self.assertSetEqual({self.developers_group}, set(groups_qs)) def test_intersect_permissions_querysets_everyone_perm(self): """ Test of intersect_permissions when querysets are specified and the Everyone group has access. 
""" self.ac_dt.groups_allowed.add(everyone_group()) users_qs, groups_qs = self.ac_dt.intersect_permissions(users_qs=self.users_to_intersect, groups_qs=self.groups_to_intersect) self.assertSetEqual(set(self.users_to_intersect), set(users_qs)) self.assertSetEqual(set(self.groups_to_intersect), set(groups_qs)) @skipIfDBFeature('is_mocked') class CompoundDatatypeSerializerTests(TestCase): def setUp(self): self.string_dt = Datatype.objects.get(pk=datatypes.STR_PK) self.dt_no_permissions = Datatype( user=kive_user(), name="DatatypeNoPermissions", description="Datatype with no added permissions" ) self.dt_no_permissions.save() self.dt_no_permissions.restricts.add(self.string_dt) self.dt_developer_permissions = Datatype( user=kive_user(), name="DatatypeDeveloperPermissions", description="Datatype that developers can access" ) self.dt_developer_permissions.save() self.dt_developer_permissions.restricts.add(self.string_dt) self.dt_developer_permissions.groups_allowed.add( Group.objects.get(pk=groups.DEVELOPERS_PK) ) self.kive_context = DuckContext(user=kive_user()) def test_validate_empty(self): """ Testing validation of a CDT with no members. """ cdt_dict = { "name": "EmptyCDT" } cdts = CompoundDatatypeSerializer(data=cdt_dict, context=self.kive_context) self.assertTrue(cdts.is_valid()) def test_validate_no_permissions(self): """ Testing validation of a good CDT serialization with no permissions granted. """ cdt_dict = { "name": "GoodCDT", "members": [ { "column_idx": 1, "column_name": "col1", "datatype": self.string_dt.pk }, { "column_idx": 2, "column_name": "col2", "datatype": self.string_dt.pk } ] } cdts = CompoundDatatypeSerializer(data=cdt_dict, context=self.kive_context) self.assertTrue(cdts.is_valid()) def test_validate_good_permissions(self): """ Testing validation of a good CDT serialization with some permissions granted. 
""" cdt_dict = { "name": "GoodCDT", "members": [ { "column_idx": 1, "column_name": "col1", "datatype": self.string_dt.pk }, { "column_idx": 2, "column_name": "col2", "datatype": self.string_dt.pk } ], "users_allowed": [kive_user().username], "groups_allowed": [everyone_group().name] } cdts = CompoundDatatypeSerializer(data=cdt_dict, context=self.kive_context) self.assertTrue(cdts.is_valid()) def test_validate_bad_indices(self): """ Validation fails if the indices are not consecutive from 1. """ cdt_dict = { "name": "BadIndicesCDT", "members": [ { "column_idx": 1, "column_name": "col1", "datatype": self.string_dt.pk }, { "column_idx": 3, "column_name": "col3bad", "datatype": self.string_dt.pk } ] } cdts = CompoundDatatypeSerializer(data=cdt_dict, context=self.kive_context) self.assertFalse(cdts.is_valid()) self.assertListEqual( cdts.errors["non_field_errors"], ["Column indices must be consecutive starting from 1"] ) def test_validate_bad_permissions(self): """ Validation fails if the CDT permissions exceed those of its members. """ new_group = Group(name="Interlopers") new_group.save() new_user = User.objects.create_user("NewUser", password="foo") # A Datatype that new_group can access but new_user can't. dt_no_new_user = Datatype( user=kive_user(), name="DatatypeNoNewUser", description="No new users allowed" ) dt_no_new_user.save() dt_no_new_user.restricts.add(self.string_dt) dt_no_new_user.groups_allowed.add(new_group) # A Datatype that new_user can access but new_group can't. 
dt_no_new_group = Datatype( user=kive_user(), name="DatatypeNoNewGroup", description="No new groups allowed" ) dt_no_new_group.save() dt_no_new_group.restricts.add(self.string_dt) dt_no_new_group.users_allowed.add(new_user) cdt_dict = { "name": "BadPermissionsCDT", "members": [ { "column_idx": 1, "column_name": "col1", "datatype": self.dt_no_permissions.pk }, { "column_idx": 2, "column_name": "col2", "datatype": self.string_dt.pk }, { "column_idx": 3, "column_name": "col3", "datatype": dt_no_new_user.pk }, { "column_idx": 4, "column_name": "col4", "datatype": dt_no_new_group.pk } ], "users_allowed": [new_user.username], "groups_allowed": [new_group.name] } cdts = CompoundDatatypeSerializer(data=cdt_dict, context=self.kive_context) self.assertFalse(cdts.is_valid()) self.assertListEqual( cdts.errors["non_field_errors"], [ "User {} cannot be granted access".format(new_user.username), "Group {} cannot be granted access".format(new_group.name) ] ) def test_create(self): """ Test creation of a CompoundDatatype via deserialization. """ cdt_dict = { "name": "GoodCDT", "members": [ { "column_idx": 1, "column_name": "col1", "datatype": self.string_dt.pk }, { "column_idx": 2, "column_name": "col2", "datatype": self.string_dt.pk } ], "groups_allowed": [everyone_group().name] } cdts = CompoundDatatypeSerializer(data=cdt_dict, context=self.kive_context) self.assertTrue(cdts.is_valid()) cdt = cdts.save() self.assertEqual(cdt.name, cdt_dict["name"]) self.assertEqual(cdt.user, kive_user()) self.assertSetEqual(set(cdt.groups_allowed.all()), {everyone_group()}) self.assertFalse(cdt.users_allowed.exists()) self.assertEqual(cdt.members.count(), 2) col1 = cdt.members.get(column_idx=1) self.assertEqual(col1.column_name, cdt_dict["members"][0]["column_name"]) self.assertEqual(col1.datatype, self.string_dt) col2 = cdt.members.get(column_idx=2) self.assertEqual(col2.column_name, cdt_dict["members"][1]["column_name"]) self.assertEqual(col2.datatype, self.string_dt)
40.849108
120
0.623585
14,173
119,116
4.973965
0.050871
0.024852
0.013902
0.018384
0.80789
0.763377
0.724665
0.692819
0.655186
0.619326
0
0.012049
0.274682
119,116
2,915
121
40.863122
0.803905
0.133324
0
0.534186
0
0
0.102077
0.005628
0
0
0
0.000686
0.126737
1
0.115064
false
0.030573
0.007782
0
0.130072
0
0
0
0
null
0
0
0
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
bca64462b5e4828d5e087bdb44d4d37bff0dd2e6
256
py
Python
unke/exceptions.py
tim-sueberkrueb/unke-python
a61a0345d6c352661d229a71fd0e098947c2a211
[ "MIT" ]
null
null
null
unke/exceptions.py
tim-sueberkrueb/unke-python
a61a0345d6c352661d229a71fd0e098947c2a211
[ "MIT" ]
null
null
null
unke/exceptions.py
tim-sueberkrueb/unke-python
a61a0345d6c352661d229a71fd0e098947c2a211
[ "MIT" ]
null
null
null
# -*- coding: utf-8 class ParseException(Exception): """ Thrown by Unke parser in case of syntax errors """ def __init__(self, message, line, col): super(ParseException, self).__init__("{}, line {}:{}".format(message, line, col))
25.6
89
0.625
30
256
5.066667
0.766667
0.144737
0.184211
0
0
0
0
0
0
0
0
0.004951
0.210938
256
9
90
28.444444
0.747525
0.253906
0
0
0
0
0.08
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.666667
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
bcc7f406ca9fe1fa56dcf9cf941b8c6be4abb80c
72
py
Python
tests/__init__.py
RealAllenDa/EEWMap
4031c951fac97a099fb715c38a1dd77c52390a15
[ "Apache-2.0" ]
null
null
null
tests/__init__.py
RealAllenDa/EEWMap
4031c951fac97a099fb715c38a1dd77c52390a15
[ "Apache-2.0" ]
7
2021-11-20T06:52:50.000Z
2022-01-13T10:01:21.000Z
tests/__init__.py
RealAllenDa/EEWMap
4031c951fac97a099fb715c38a1dd77c52390a15
[ "Apache-2.0" ]
null
null
null
""" EEWMap - Tests Unittest - Used to test modules and utilities. """
14.4
47
0.666667
9
72
5.333333
1
0
0
0
0
0
0
0
0
0
0
0
0.208333
72
4
48
18
0.842105
0.847222
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
bce1e3233c128f14adf74ec6cd0ebf2bc5e04bb2
298
py
Python
homeassistant/components/sonos/exception.py
MrDelik/core
93a66cc357b226389967668441000498a10453bb
[ "Apache-2.0" ]
3
2021-11-22T22:37:43.000Z
2022-03-17T00:55:28.000Z
homeassistant/components/sonos/exception.py
MrDelik/core
93a66cc357b226389967668441000498a10453bb
[ "Apache-2.0" ]
25
2021-11-24T06:24:10.000Z
2022-03-31T06:23:06.000Z
homeassistant/components/sonos/exception.py
MrDelik/core
93a66cc357b226389967668441000498a10453bb
[ "Apache-2.0" ]
3
2022-01-02T18:49:54.000Z
2022-01-25T02:03:54.000Z
"""Sonos specific exceptions.""" from homeassistant.components.media_player.errors import BrowseError from homeassistant.exceptions import HomeAssistantError class UnknownMediaType(BrowseError): """Unknown media type.""" class SonosUpdateError(HomeAssistantError): """Update failed."""
24.833333
68
0.788591
27
298
8.666667
0.703704
0.145299
0
0
0
0
0
0
0
0
0
0
0.107383
298
11
69
27.090909
0.879699
0.204698
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
bcf9bc7aaffe5aac2b4d676b557a7c3ffbc3436a
80
py
Python
test/services/scan_package/service1.py
livioribeiro/dependency-injector
ab76415233d7e6f82ff13479d10c2aa0f100c173
[ "Unlicense" ]
1
2021-08-13T20:23:56.000Z
2021-08-13T20:23:56.000Z
test/sync_tests/services/scan_package/service1.py
livioribeiro/dependency-injector
ab76415233d7e6f82ff13479d10c2aa0f100c173
[ "Unlicense" ]
null
null
null
test/sync_tests/services/scan_package/service1.py
livioribeiro/dependency-injector
ab76415233d7e6f82ff13479d10c2aa0f100c173
[ "Unlicense" ]
null
null
null
from dependency_injector import singleton @singleton class Service1: pass
11.428571
41
0.8
9
80
7
0.888889
0
0
0
0
0
0
0
0
0
0
0.015152
0.175
80
6
42
13.333333
0.939394
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.25
0.25
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
4
4c2153194300cac891a469113895fdf7292ba6c3
2,091
py
Python
kubernetes/test/test_admissionregistration_v1alpha1_api.py
sgwilliams-ebsco/python
35e6406536c96d4769ff7e2a02bf0fdcb902a509
[ "Apache-2.0" ]
1
2021-06-10T23:44:11.000Z
2021-06-10T23:44:11.000Z
kubernetes/test/test_admissionregistration_v1alpha1_api.py
sgwilliams-ebsco/python
35e6406536c96d4769ff7e2a02bf0fdcb902a509
[ "Apache-2.0" ]
null
null
null
kubernetes/test/test_admissionregistration_v1alpha1_api.py
sgwilliams-ebsco/python
35e6406536c96d4769ff7e2a02bf0fdcb902a509
[ "Apache-2.0" ]
1
2018-11-06T16:33:43.000Z
2018-11-06T16:33:43.000Z
# coding: utf-8 """ Kubernetes No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) OpenAPI spec version: v1.12.2 Generated by: https://github.com/swagger-api/swagger-codegen.git """ from __future__ import absolute_import import os import sys import unittest import kubernetes.client from kubernetes.client.rest import ApiException from kubernetes.client.apis.admissionregistration_v1alpha1_api import AdmissionregistrationV1alpha1Api class TestAdmissionregistrationV1alpha1Api(unittest.TestCase): """ AdmissionregistrationV1alpha1Api unit test stubs """ def setUp(self): self.api = kubernetes.client.apis.admissionregistration_v1alpha1_api.AdmissionregistrationV1alpha1Api() def tearDown(self): pass def test_create_initializer_configuration(self): """ Test case for create_initializer_configuration """ pass def test_delete_collection_initializer_configuration(self): """ Test case for delete_collection_initializer_configuration """ pass def test_delete_initializer_configuration(self): """ Test case for delete_initializer_configuration """ pass def test_get_api_resources(self): """ Test case for get_api_resources """ pass def test_list_initializer_configuration(self): """ Test case for list_initializer_configuration """ pass def test_patch_initializer_configuration(self): """ Test case for patch_initializer_configuration """ pass def test_read_initializer_configuration(self): """ Test case for read_initializer_configuration """ pass def test_replace_initializer_configuration(self): """ Test case for replace_initializer_configuration """ pass if __name__ == '__main__': unittest.main()
20.70297
111
0.656624
198
2,091
6.636364
0.318182
0.255708
0.066971
0.091324
0.522831
0.416286
0.126332
0
0
0
0
0.011221
0.275466
2,091
100
112
20.91
0.856106
0.307508
0
0.3
1
0
0.006957
0
0
0
0
0
0
1
0.333333
false
0.3
0.233333
0
0.6
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
4
4c70c1489928d9894fe77c88ab0c8b0cc9a062e5
3,280
py
Python
var/spack/repos/builtin/packages/py-cython/package.py
jeanbez/spack
f4e51ce8f366c85bf5aa0eafe078677b42dae1ba
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
null
null
null
var/spack/repos/builtin/packages/py-cython/package.py
jeanbez/spack
f4e51ce8f366c85bf5aa0eafe078677b42dae1ba
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
8
2021-11-09T20:28:40.000Z
2022-03-15T03:26:33.000Z
var/spack/repos/builtin/packages/py-cython/package.py
jeanbez/spack
f4e51ce8f366c85bf5aa0eafe078677b42dae1ba
[ "ECL-2.0", "Apache-2.0", "MIT-0", "MIT" ]
2
2019-02-08T20:37:20.000Z
2019-03-31T15:19:26.000Z
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack.package import * class PyCython(PythonPackage): """The Cython compiler for writing C extensions for the Python language.""" homepage = "https://github.com/cython/cython" pypi = "cython/Cython-0.29.21.tar.gz" version('3.0.0a9', sha256='23931c45877432097cef9de2db2dc66322cbc4fc3ebbb42c476bb2c768cecff0') version('0.29.30', sha256='2235b62da8fe6fa8b99422c8e583f2fb95e143867d337b5c75e4b9a1a865f9e3', preferred=True) version('0.29.24', sha256='cdf04d07c3600860e8c2ebaad4e8f52ac3feb212453c1764a49ac08c827e8443') version('0.29.23', sha256='6a0d31452f0245daacb14c979c77e093eb1a546c760816b5eed0047686baad8e') version('0.29.22', sha256='df6b83c7a6d1d967ea89a2903e4a931377634a297459652e4551734c48195406') version('0.29.21', sha256='e57acb89bd55943c8d8bf813763d20b9099cc7165c0f16b707631a7654be9cad') version('0.29.20', sha256='22d91af5fc2253f717a1b80b8bb45acb655f643611983fd6f782b9423f8171c7') version('0.29.16', sha256='232755284f942cbb3b43a06cd85974ef3c970a021aef19b5243c03ee2b08fa05') version('0.29.15', sha256='60d859e1efa5cc80436d58aecd3718ff2e74b987db0518376046adedba97ac30') version('0.29.14', sha256='e4d6bb8703d0319eb04b7319b12ea41580df44fd84d83ccda13ea463c6801414') version('0.29.13', sha256='c29d069a4a30f472482343c866f7486731ad638ef9af92bfe5fca9c7323d638e') version('0.29.10', sha256='26229570d6787ff3caa932fe9d802960f51a89239b990d275ae845405ce43857') version('0.29.7', sha256='55d081162191b7c11c7bfcb7c68e913827dfd5de6ecdbab1b99dab190586c1e8') version('0.29.5', sha256='9d5290d749099a8e446422adfb0aa2142c711284800fb1eb70f595101e32cbf1') version('0.29', sha256='94916d1ede67682638d3cc0feb10648ff14dc51fb7a7f147f4fedce78eaaea97') version('0.28.6', sha256='68aa3c00ef1deccf4dd50f0201d47c268462978c12c42943bc33dc9dc816ac1b') version('0.28.3', 
sha256='1aae6d6e9858888144cea147eb5e677830f45faaff3d305d77378c3cba55f526') version('0.28.1', sha256='152ee5f345012ca3bb7cc71da2d3736ee20f52cd8476e4d49e5e25c5a4102b12') version('0.25.2', sha256='f141d1f9c27a07b5a93f7dc5339472067e2d7140d1c5a9e20112a5665ca60306') version('0.23.5', sha256='0ae5a5451a190e03ee36922c4189ca2c88d1df40a89b4f224bc842d388a0d1b6') version('0.23.4', sha256='fec42fecee35d6cc02887f1eef4e4952c97402ed2800bfe41bbd9ed1a0730d8e') version('0.21.2', sha256='b01af23102143515e6138a4d5e185c2cfa588e0df61c0827de4257bac3393679') depends_on('python@2.7:2,3.4:', when='@3:', type=('build', 'link', 'run')) depends_on('python@2.6:2,3.3:', when='@0.23:', type=('build', 'link', 'run')) depends_on('python@:2', when='@:0.22', type=('build', 'link', 'run')) depends_on('py-setuptools', type=('build', 'run')) depends_on('gdb@7.2:', type='test') @property def command(self): """Returns the Cython command""" return Executable(self.prefix.bin.cython) @run_after('install') @on_package_attributes(run_tests=True) def build_test(self): # Warning: full suite of unit tests takes a very long time python('runtests.py', '-j', str(make_jobs))
60.740741
113
0.771341
293
3,280
8.59727
0.457338
0.066693
0.055578
0.019055
0.035331
0.035331
0.025407
0.025407
0
0
0
0.372156
0.102134
3,280
53
114
61.886792
0.483192
0.104573
0
0
0
0
0.6013
0.491444
0
0
0
0
0
1
0.052632
false
0
0.026316
0
0.184211
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
d5b3ff70593a9c07cd082586f374b785fb34d0ef
575
py
Python
PythonProjects/99-CapstoneProject-202020d/libs/set_robot_number.py
much2mutch/csse120-public
4f862a6deb7a5373fb5723fb2a23e4042e4d4157
[ "MIT" ]
null
null
null
PythonProjects/99-CapstoneProject-202020d/libs/set_robot_number.py
much2mutch/csse120-public
4f862a6deb7a5373fb5723fb2a23e4042e4d4157
[ "MIT" ]
null
null
null
PythonProjects/99-CapstoneProject-202020d/libs/set_robot_number.py
much2mutch/csse120-public
4f862a6deb7a5373fb5723fb2a23e4042e4d4157
[ "MIT" ]
null
null
null
""" Capstone Team Project. Sets the ROBOT NUMBER for YOUR team's robot. Winter term, 2019-2020. """ def get_robot_number(): my_robot_number = None return my_robot_number # -------------------------------------------------------------------------- # TODO: 1. Set my_robot_number in the above to YOUR team's robot number # (e.g., 5 or 27). Set it to None if you do NOT want to send messages # to/from the robot (e.g., if you only want to display the GUI). # --------------------------------------------------------------------------
38.333333
80
0.48
74
575
3.621622
0.567568
0.246269
0.145522
0.104478
0
0
0
0
0
0
0
0.02603
0.198261
575
14
81
41.071429
0.555315
0.794783
0
0
0
0
0
0
0
0
0
0.071429
0
1
0.333333
false
0
0
0
0.666667
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
0
0
0
0
1
0
0
4
d5d3256539904ba5dd3ab906b9ac486c09860e09
37
py
Python
pythonCode/test.py
eatmore/python_practice
c6a773c8d24182b23a86fd9b66b27b5ff948b258
[ "MIT" ]
null
null
null
pythonCode/test.py
eatmore/python_practice
c6a773c8d24182b23a86fd9b66b27b5ff948b258
[ "MIT" ]
null
null
null
pythonCode/test.py
eatmore/python_practice
c6a773c8d24182b23a86fd9b66b27b5ff948b258
[ "MIT" ]
1
2020-03-12T06:05:38.000Z
2020-03-12T06:05:38.000Z
s = 'PYTHON' print('{0:3}'.format(s))
18.5
24
0.567568
7
37
3
0.857143
0
0
0
0
0
0
0
0
0
0
0.058824
0.081081
37
2
24
18.5
0.558824
0
0
0
0
0
0.289474
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
4
910b73331c5b8e9c51deafb4a5d60d9dfdb67812
554
py
Python
app/domain/use_case/responses.py
globocom/enforcement
004ff545d6d61b95b555d9981525510496862b3e
[ "BSD-3-Clause" ]
7
2020-11-08T18:02:26.000Z
2021-10-15T21:40:35.000Z
app/domain/use_case/responses.py
globocom/enforcement
004ff545d6d61b95b555d9981525510496862b3e
[ "BSD-3-Clause" ]
19
2020-11-19T20:57:20.000Z
2021-09-03T14:53:34.000Z
app/domain/use_case/responses.py
globocom/enforcement-service
004ff545d6d61b95b555d9981525510496862b3e
[ "BSD-3-Clause" ]
3
2020-10-03T02:40:34.000Z
2020-10-19T10:17:06.000Z
from dataclasses import dataclass, field from typing import List from app.domain.entities import Cluster, Enforcement @dataclass class RulesResponse: clusters: List[Cluster] = field(default_factory=list) install_errors: List[Enforcement] = field(default_factory=list) @dataclass class UpdateRulesResponse(RulesResponse): removed_enforcements: List[Enforcement] = field(default_factory=list) changed_enforcements: List[Enforcement] = field(default_factory=list) added_enforcements: List[Enforcement] = field(default_factory=list)
30.777778
73
0.801444
62
554
7.016129
0.387097
0.137931
0.218391
0.264368
0.432184
0.432184
0.344828
0
0
0
0
0
0.117329
554
17
74
32.588235
0.889571
0
0
0.166667
0
0
0
0
0
0
0
0
0
1
0
true
0
0.25
0
0.833333
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
1
0
0
4
911bb2f2cfe253abbd4204eb7bec3b25f21679ba
222
py
Python
server/src/commands/weather.py
op23n1/bridge
8252224c0956afb49b86a2754763ac23767eb707
[ "MIT" ]
1
2021-01-16T05:50:28.000Z
2021-01-16T05:50:28.000Z
server/src/commands/weather.py
op23n1/bridge
8252224c0956afb49b86a2754763ac23767eb707
[ "MIT" ]
null
null
null
server/src/commands/weather.py
op23n1/bridge
8252224c0956afb49b86a2754763ac23767eb707
[ "MIT" ]
3
2021-01-17T17:17:06.000Z
2021-01-17T19:04:40.000Z
from commands.base_command import Command from commands.utils import check_weather class Weather(Command): def exec(self): """ !weather;City """ return check_weather(self.args[0])
27.75
42
0.63964
26
222
5.346154
0.615385
0.172662
0
0
0
0
0
0
0
0
0
0.006135
0.265766
222
8
43
27.75
0.846626
0.058559
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0.4
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
914fea0eeafd0a0f83f53f147af71b791090d6b7
97
py
Python
sample/helloworld/test.py
HBuczynski/phoenix-rtos-tests
3485485e128f5b688be808a968a399938a303fa3
[ "BSD-3-Clause" ]
null
null
null
sample/helloworld/test.py
HBuczynski/phoenix-rtos-tests
3485485e128f5b688be808a968a399938a303fa3
[ "BSD-3-Clause" ]
null
null
null
sample/helloworld/test.py
HBuczynski/phoenix-rtos-tests
3485485e128f5b688be808a968a399938a303fa3
[ "BSD-3-Clause" ]
null
null
null
def harness(p): if 'Hello world!' not in p.readline(): return False return True
16.166667
42
0.597938
14
97
4.142857
0.857143
0
0
0
0
0
0
0
0
0
0
0
0.298969
97
5
43
19.4
0.852941
0
0
0
0
0
0.123711
0
0
0
0
0
0
1
0.25
false
0
0
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
e66c404ce27fca686b74503befc14c2adc918f47
83
py
Python
xtractor/__main__.py
Bichwaa/xtractor
ce321dda6dbe7cd5b4b924f3794197d346d39adf
[ "MIT" ]
null
null
null
xtractor/__main__.py
Bichwaa/xtractor
ce321dda6dbe7cd5b4b924f3794197d346d39adf
[ "MIT" ]
1
2022-01-31T17:09:07.000Z
2022-02-03T07:25:38.000Z
xtractor/__main__.py
Bichwaa/xtractor
ce321dda6dbe7cd5b4b924f3794197d346d39adf
[ "MIT" ]
null
null
null
'''extracts shit ''' import fire from xtractor import recover fire.Fire(recover)
11.857143
28
0.746988
11
83
5.636364
0.636364
0
0
0
0
0
0
0
0
0
0
0
0.144578
83
7
29
11.857143
0.873239
0.156627
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
e6893a1dc607b3e5d29ddc6b312702493b39c856
758
py
Python
tests/test_collecting/test_handler/test_equeality.py
ExpressApp/pybotx
97c8b1ce5d45a05567ed01d545cb43174a2dcbb9
[ "MIT" ]
13
2021-01-21T12:43:10.000Z
2022-03-23T11:11:59.000Z
tests/test_collecting/test_handler/test_equeality.py
ExpressApp/pybotx
97c8b1ce5d45a05567ed01d545cb43174a2dcbb9
[ "MIT" ]
259
2020-02-26T08:51:03.000Z
2022-03-23T11:08:36.000Z
tests/test_collecting/test_handler/test_equeality.py
ExpressApp/pybotx
97c8b1ce5d45a05567ed01d545cb43174a2dcbb9
[ "MIT" ]
5
2019-12-02T16:19:22.000Z
2021-11-22T20:33:34.000Z
from botx.collecting.handlers.handler import Handler pytest_plugins = ("tests.test_collecting.fixtures",) def test_equality_is_false_if_not_handler_passed(handler_as_function): handler = Handler(body="/command", handler=handler_as_function) assert handler != "" def test_equality_is_false_if_handlers_are_different(handler_as_function): handler1 = Handler(body="/command1", handler=handler_as_function) handler2 = Handler(body="/command2", handler=handler_as_function) assert handler1 != handler2 def test_equality_if_handlers_are_similar(handler_as_function): handler1 = Handler(body="/command", handler=handler_as_function) handler2 = Handler(body="/command", handler=handler_as_function) assert handler1 == handler2
36.095238
74
0.792876
95
758
5.936842
0.315789
0.12766
0.241135
0.212766
0.638298
0.638298
0.445035
0.170213
0
0
0
0.014837
0.110818
758
20
75
37.9
0.821958
0
0
0
0
0
0.094987
0.039578
0
0
0
0
0.230769
1
0.230769
false
0.076923
0.076923
0
0.307692
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
4
e69a3603c29d26822dbc0d7d317da4530a211e5b
26
py
Python
src/ralph/__init__.py
kula1922/ralph
08642714cc894025ba4792eeae63037a46f045b3
[ "Apache-2.0" ]
null
null
null
src/ralph/__init__.py
kula1922/ralph
08642714cc894025ba4792eeae63037a46f045b3
[ "Apache-2.0" ]
null
null
null
src/ralph/__init__.py
kula1922/ralph
08642714cc894025ba4792eeae63037a46f045b3
[ "Apache-2.0" ]
null
null
null
VERSION = ('2', '2', '0')
13
25
0.384615
4
26
2.5
0.75
0
0
0
0
0
0
0
0
0
0
0.142857
0.192308
26
1
26
26
0.333333
0
0
0
0
0
0.115385
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
e69d55db9b7df39b2265105f72964987772020b8
210
py
Python
lm_pickers/definied.py
dulek/alt-tester
d3925e28b2b01ca25706546646b921de90fcd5c8
[ "MIT" ]
null
null
null
lm_pickers/definied.py
dulek/alt-tester
d3925e28b2b01ca25706546646b921de90fcd5c8
[ "MIT" ]
null
null
null
lm_pickers/definied.py
dulek/alt-tester
d3925e28b2b01ca25706546646b921de90fcd5c8
[ "MIT" ]
null
null
null
from lm_picker import LMPicker class DefiniedLMPicker(LMPicker): def get_landmarks(self, lm_num=10): lms = [9987, 2062, 36861, 1483, 37357, 30542, 1271, 6699] return self._calc_dists(lms)
26.25
65
0.695238
29
210
4.862069
0.862069
0
0
0
0
0
0
0
0
0
0
0.221557
0.204762
210
7
66
30
0.622754
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0.2
0
0.8
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
e6adf08a425d37c31a6d4cd8d85cee106a2490c5
32,331
py
Python
mergify_engine/tests/functional/test_engine_v2.py
guits/mergify-engine
5b97e2697e4743ff9e049b90246f9161359655be
[ "Apache-2.0" ]
null
null
null
mergify_engine/tests/functional/test_engine_v2.py
guits/mergify-engine
5b97e2697e4743ff9e049b90246f9161359655be
[ "Apache-2.0" ]
null
null
null
mergify_engine/tests/functional/test_engine_v2.py
guits/mergify-engine
5b97e2697e4743ff9e049b90246f9161359655be
[ "Apache-2.0" ]
null
null
null
# -*- encoding: utf-8 -*- # # Copyright © 2018 Mehdi Abaakouk <sileht@sileht.net> # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import logging import github import yaml from mergify_engine import check_api from mergify_engine import config from mergify_engine import mergify_pull from mergify_engine.tasks.engine import v2 from mergify_engine.tests.functional import base LOG = logging.getLogger(__name__) MERGE_EVENTS = [ ("pull_request", {"action": "closed"}), ("check_run", {"check_run": {"conclusion": "success"}}), ("check_run", {"check_run": {"conclusion": "success"}}), ("check_run", {"check_run": {"conclusion": "success"}}), ("check_suite", {"action": "requested"}), ] def run_smart_strict_workflow_periodic_task(): # NOTE(sileht): actions must not be loaded manually before the celery # worker. Otherwise we have circular import loop. from mergify_engine.actions import merge merge.smart_strict_workflow_periodic_task.apply_async() class TestEngineV2Scenario(base.FunctionalTestBase): """Mergify engine tests. Tests user github resource and are slow, so we must reduce the number of scenario as much as possible for now. 
""" def setUp(self): with open(v2.mergify_rule_path, "r") as f: v2.MERGIFY_RULE = yaml.safe_load(f.read().replace( "mergify[bot]", "mergify-test[bot]")) super(TestEngineV2Scenario, self).setUp() def test_backport_cancelled(self): rules = {'pull_request_rules': [ {"name": "backport", "conditions": [ "base=master", "label=backport-3.1", ], "actions": { "backport": { "branches": ['stable/3.1'], }} } ]} self.setup_repo(yaml.dump(rules), test_branches=['stable/3.1']) p, _ = self.create_pr() self.add_label_and_push_events(p, "backport-3.1") self.push_events([ ("check_run", {"check_run": {"conclusion": "success"}}), # Summary ("check_run", {"check_run": {"conclusion": None}}), # Backport ]) p.remove_from_labels("backport-3.1") self.push_events([ ("pull_request", {"action": "unlabeled"}), ("check_run", {"check_run": {"conclusion": "success"}}), # Summary # Backport ("check_run", {"check_run": {"conclusion": "cancelled"}}), ], ordered=False) checks = list(check_api.get_checks(p, { "check_name": "Mergify — Rule: backport (backport)"})) self.assertEqual("cancelled", checks[0].conclusion) def test_delete_branch(self): rules = {'pull_request_rules': [ {"name": "delete on merge", "conditions": [ "base=master", "label=merge", "merged", ], "actions": { "delete_head_branch": None} }, {"name": "delete on close", "conditions": [ "base=master", "label=close", "closed", ], "actions": { "delete_head_branch": {}} } ]} self.setup_repo(yaml.dump(rules)) p1, _ = self.create_pr(base_repo="main") p1.merge() self.push_events([ ("check_suite", {"action": "requested"}), ("check_run", {"check_run": {"conclusion": "success"}}), # Summary ("pull_request", {"action": "closed"}), ], ordered=False) p2, _ = self.create_pr(base_repo="main") p2.edit(state="close") self.push_events([ ("pull_request", {"action": "closed"}), ("check_run", {"check_run": {"conclusion": "success"}}), # Summary ], ordered=False) self.add_label_and_push_events( p1, "merge", [("check_run", {"check_run": {"conclusion": 
"success"}})] ) self.push_events([ ("check_run", {"check_run": {"conclusion": "success"}}), # Summary ("check_run", {"check_run": {"conclusion": "success"}}), # Merge ], ordered=False) self.add_label_and_push_events(p2, "close") self.push_events([ ("check_run", {"check_run": {"conclusion": "success"}}), # Summary ("check_run", {"check_run": {"conclusion": "success"}}), # Merge ], ordered=False) pulls = list(self.r_o_admin.get_pulls(state="all")) self.assertEqual(2, len(pulls)) for b in ("main/pr1", "main/pr2"): try: self.r_o_admin.get_branch(b) except github.GithubException as e: if e.status == 404: continue self.assertTrue(False, "branch %s not deleted" % b) def test_label(self): rules = {'pull_request_rules': [ {"name": "rename label", "conditions": [ "base=master", "label=stable", ], "actions": { "label": { "add": ['unstable', 'foobar'], "remove": ['stable', 'what'], }} } ]} self.setup_repo(yaml.dump(rules)) p, _ = self.create_pr() self.add_label_and_push_events(p, "stable") pulls = list(self.r_o_admin.get_pulls()) self.assertEqual(1, len(pulls)) self.assertEqual(sorted(["unstable", "foobar"]), sorted([l.name for l in pulls[0].labels])) def test_comment(self): rules = {'pull_request_rules': [ {"name": "comment", "conditions": [ "base=master", ], "actions": { "comment": { "message": "WTF?" }} } ]} self.setup_repo(yaml.dump(rules)) p, _ = self.create_pr() self.push_events([ ("check_run", {"action": "completed"}), ("check_run", {"action": "created"}), ]) p.update() comments = list(p.get_issue_comments()) self.assertEqual("WTF?", comments[-1].body) # Add a label to trigger mergify self.add_label_and_push_events(p, "stable") # Ensure nothing changed new_comments = list(p.get_issue_comments()) self.assertEqual(len(comments), len(new_comments)) self.assertEqual("WTF?", new_comments[-1].body) def test_close(self): rules = {'pull_request_rules': [ {"name": "rename label", "conditions": [ "base=master", ], "actions": { "close": { "message": "WTF?" 
}} } ]} self.setup_repo(yaml.dump(rules)) p, _ = self.create_pr() p.update() self.assertEqual("closed", p.state) self.assertEqual("WTF?", list(p.get_issue_comments())[-1].body) def test_dismiss_reviews(self): rules = {'pull_request_rules': [ {"name": "dismiss reviews", "conditions": [ "base=master", ], "actions": { "dismiss_reviews": { "approved": True, "changes_requested": ["mergify-test1"], }} } ]} self.setup_repo(yaml.dump(rules)) p, commits = self.create_pr() branch = "fork/pr%d" % self.pr_counter self.create_review_and_push_event(p, commits[-1], "APPROVE") self.assertEqual( [("APPROVED", "mergify-test1")], [(r.state, r.user.login) for r in p.get_reviews()] ) open(self.git.tmp + "/unwanted_changes", "wb").close() self.git("add", self.git.tmp + "/unwanted_changes") self.git("commit", "--no-edit", "-m", "unwanted_changes") self.git("push", "--quiet", "fork", branch) self.push_events([ ("pull_request", {"action": "synchronize"}), ]), self.push_events([ ("check_suite", {"action": "completed"}), ("check_run", {"check_run": {"conclusion": "success"}}), ("check_run", {"check_run": {"conclusion": "success"}}), ("pull_request_review", {"action": "dismissed"}), ], ordered=False) self.assertEqual( [("DISMISSED", "mergify-test1")], [(r.state, r.user.login) for r in p.get_reviews()] ) commits = list(p.get_commits()) self.create_review_and_push_event(p, commits[-1], "REQUEST_CHANGES") self.assertEqual( [("DISMISSED", "mergify-test1"), ("CHANGES_REQUESTED", "mergify-test1")], [(r.state, r.user.login) for r in p.get_reviews()] ) open(self.git.tmp + "/unwanted_changes2", "wb").close() self.git("add", self.git.tmp + "/unwanted_changes2") self.git("commit", "--no-edit", "-m", "unwanted_changes2") self.git("push", "--quiet", "fork", branch) self.push_events([ ("pull_request", {"action": "synchronize"}), ]), self.push_events([ ("check_suite", {"action": "completed"}), ("check_run", {"check_run": {"conclusion": "success"}}), ("check_run", {"check_run": {"conclusion": "success"}}), 
("pull_request_review", {"action": "dismissed"}), ], ordered=False) self.assertEqual( [("DISMISSED", "mergify-test1"), ("DISMISSED", "mergify-test1")], [(r.state, r.user.login) for r in p.get_reviews()] ) def test_merge_backport(self): rules = {'pull_request_rules': [ {"name": "Merge on master", "conditions": [ "base=master", "status-success=continuous-integration/fake-ci", "#approved-reviews-by>=1", ], "actions": { "merge": {} }}, {"name": "Backport to stable/3.1", "conditions": [ "base=master", "label=backport-3.1", ], "actions": { "backport": { "branches": ['stable/3.1'], }} }, {"name": "automerge backport", "conditions": [ "head~=^mergify/bp/", ], "actions": { "merge": {} }}, ]} self.setup_repo(yaml.dump(rules), test_branches=['stable/3.1']) self.create_pr() p2, commits = self.create_pr() self.add_label_and_push_events(p2, "backport-3.1") self.push_events([ # Summary ("check_run", {"check_run": {"conclusion": "success"}}), # Backport ("check_run", {"check_run": {"conclusion": None}}), ], ordered=False) self.create_status_and_push_event(p2, context="not required status check", state="failure") self.create_status_and_push_event(p2) self.push_events([ # Summary ("check_run", {"check_run": {"conclusion": "success"}}), ]) self.create_review_and_push_event(p2, commits[0]) self.push_events(MERGE_EVENTS, ordered=False) self.push_events([ ("check_suite", {"action": "requested"}), ("pull_request", {"action": "opened"}), ("check_run", {"check_run": {"conclusion": "success"}}), ("check_run", {"check_run": {"conclusion": "success"}}), ("check_run", {"check_run": {"conclusion": "success"}}), ("check_suite", {"action": "completed"}), ("pull_request", {"action": "closed"}), ("check_run", {"check_run": {"conclusion": "success"}}), ("check_suite", {"action": "requested"}), ], ordered=False) pulls = list(self.r_o_admin.get_pulls(state="all")) self.assertEqual(3, len(pulls)) self.assertEqual(3, pulls[0].number) self.assertEqual(2, pulls[1].number) self.assertEqual(1, 
pulls[2].number) self.assertEqual(True, pulls[1].merged) self.assertEqual("closed", pulls[1].state) self.assertEqual(True, pulls[0].merged) self.assertEqual("closed", pulls[0].state) self.assertEqual([], [b.name for b in self.r_o_admin.get_branches() if b.name.startswith("mergify/bp")]) def test_merge_strict_rebase(self): rules = {'pull_request_rules': [ {"name": "smart strict merge on master", "conditions": [ "base=master", "status-success=continuous-integration/fake-ci", "#approved-reviews-by>=1", ], "actions": { "merge": {"strict": True, "strict_method": "rebase"}}, } ]} self.setup_repo(yaml.dump(rules), test_branches=['stable/3.1']) p, _ = self.create_pr() p2, commits = self.create_pr() p.merge() self.push_events([ ("pull_request", {"action": "closed"}), ("check_suite", {"action": "requested"}), ]) previous_master_sha = self.r_o_admin.get_commits()[0].sha self.create_status_and_push_event(p2) self.push_events([ ("check_run", {"check_run": {"conclusion": "success"}}), ]) self.create_review_and_push_event(p2, commits[0]) self.push_events([ # Summary ("check_run", {"check_run": {"conclusion": "success"}}), ("pull_request", {"action": "synchronize"}), # Merge ("check_run", {"check_run": {"conclusion": None}}), # Merge ("check_run", {"check_run": {"conclusion": "success"}}), # Merge ("check_run", {"check_run": {"conclusion": "success"}}), ], ordered=False) p2 = self.r_o_admin.get_pull(p2.number) commits2 = list(p2.get_commits()) self.assertEquals(1, len(commits2)) self.assertNotEqual(commits[0].sha, commits2[0].sha) self.assertEqual(commits[0].commit.message, commits2[0].commit.message) # Retry to merge pr2 self.create_status_and_push_event(p2) self.push_events([ ("pull_request", {"action": "closed"}), ("check_run", {"check_run": {"conclusion": "success"}}), ("check_suite", {"action": "requested"}), ], ordered=False) master_sha = self.r_o_admin.get_commits()[0].sha self.assertNotEqual(previous_master_sha, master_sha) pulls = list(self.r_o_admin.get_pulls()) 
self.assertEqual(0, len(pulls)) def test_merge_strict(self): rules = {'pull_request_rules': [ {"name": "smart strict merge on master", "conditions": [ "base=master", "status-success=continuous-integration/fake-ci", "#approved-reviews-by>=1", ], "actions": { "merge": {"strict": True}}, } ]} self.setup_repo(yaml.dump(rules), test_branches=['stable/3.1']) p, _ = self.create_pr() p2, commits = self.create_pr() p.merge() self.push_events([ ("pull_request", {"action": "closed"}), ("check_suite", {"action": "requested"}), ]) previous_master_sha = self.r_o_admin.get_commits()[0].sha self.create_status_and_push_event(p2) self.push_events([ ("check_run", {"check_run": {"conclusion": "success"}}), ]) self.create_review_and_push_event(p2, commits[0]) self.push_events([ # Summary ("check_run", {"check_run": {"conclusion": "success"}}), ("pull_request", {"action": "synchronize"}), # Merge ("check_run", {"check_run": {"conclusion": None}}), # Merge ("check_run", {"check_run": {"conclusion": "success"}}), # Merge ("check_run", {"check_run": {"conclusion": "success"}}), ], ordered=False) p2 = self.r_o_admin.get_pull(p2.number) commits2 = list(p2.get_commits()) # Check master have been merged into the PR self.assertIn("Merge branch 'master' into 'fork/pr2'", commits2[-1].commit.message) # Retry to merge pr2 self.create_status_and_push_event(p2) self.push_events([ ("pull_request", {"action": "closed"}), ("check_run", {"check_run": {"conclusion": "success"}}), ("check_suite", {"action": "requested"}), ], ordered=False) master_sha = self.r_o_admin.get_commits()[0].sha self.assertNotEqual(previous_master_sha, master_sha) pulls = list(self.r_o_admin.get_pulls()) self.assertEqual(0, len(pulls)) def test_merge_smart_strict(self): rules = {'pull_request_rules': [ {"name": "strict merge on master", "conditions": [ "base=master", "status-success=continuous-integration/fake-ci", "#approved-reviews-by>=1", ], "actions": { "merge": {"strict": "smart"}}, } ]} self.setup_repo(yaml.dump(rules), 
test_branches=['stable/3.1']) p, _ = self.create_pr() p2, commits = self.create_pr() p.merge() self.push_events([ ("pull_request", {"action": "closed"}), ("check_suite", {"action": "requested"}), ]) previous_master_sha = self.r_o_admin.get_commits()[0].sha self.create_status_and_push_event(p2) self.push_events([ # Summary ("check_run", {"check_run": {"conclusion": "success"}}), ]) self.create_review_and_push_event(p2, commits[0]) self.push_events([ # Summary ("check_run", {"check_run": {"conclusion": "success"}}), # Merge ("check_run", {"check_run": {"conclusion": None}}), ]) # We can run celery beat inside tests, so run the task manually run_smart_strict_workflow_periodic_task() self.push_events([ ("pull_request", {"action": "synchronize"}), # Merge ("check_run", {"check_run": {"conclusion": None}}), # Summary ("check_run", {"check_run": {"conclusion": "success"}}), # Merge ("check_run", {"check_run": {"conclusion": "success"}}), ], ordered=False) p2 = self.r_o_admin.get_pull(p2.number) commits2 = list(p2.get_commits()) # Check master have been merged into the PR self.assertIn("Merge branch 'master' into 'fork/pr2'", commits2[-1].commit.message) # Retry to merge pr2 self.create_status_and_push_event(p2) self.push_events([ ("pull_request", {"action": "closed"}), ("check_run", {"check_run": {"conclusion": "success"}}), ("check_suite", {"action": "requested"}), ], ordered=False) master_sha = self.r_o_admin.get_commits()[0].sha self.assertNotEqual(previous_master_sha, master_sha) pulls = list(self.r_o_admin.get_pulls()) self.assertEqual(0, len(pulls)) def test_merge_failure_smart_strict(self): rules = {'pull_request_rules': [ {"name": "strict merge on master", "conditions": [ "base=master", "status-success=continuous-integration/fake-ci", ], "actions": { "merge": {"strict": "smart"}}, } ]} self.setup_repo(yaml.dump(rules), test_branches=['stable/3.1']) p, _ = self.create_pr() p2, commits = self.create_pr() p3, commits = self.create_pr() p.merge() self.push_events([ 
("pull_request", {"action": "closed"}), ("check_suite", {"action": "requested"}), ]) previous_master_sha = self.r_o_admin.get_commits()[0].sha self.create_status(p2, "continuous-integration/fake-ci", "success") self.push_events([ # fake-ci statuses ("status", {"state": "success"}), # Summaries ("check_run", {"check_run": {"conclusion": "success"}}), # Merge ("check_run", {"check_run": {"conclusion": None}}), ]) # We can run celery beat inside tests, so run the task manually run_smart_strict_workflow_periodic_task() self.push_events([ ("pull_request", {"action": "synchronize"}), # Merge ("check_run", {"check_run": {"conclusion": None}}), # Summary ("check_run", {"check_run": {"conclusion": "success"}}), # Merge ("check_run", {"check_run": {"conclusion": "success"}}), ], ordered=False) self.create_status(p3, "continuous-integration/fake-ci", "success") self.push_events([ ("status", {"state": "success"}), # Summaries ("check_run", {"check_run": {"conclusion": "success"}}), # Merge ("check_run", {"check_run": {"conclusion": None}}), ]) p2 = self.r_o_admin.get_pull(p2.number) commits2 = list(p2.get_commits()) self.assertIn("Merge branch 'master' into 'fork/pr2'", commits2[-1].commit.message) self.create_status(p2, "continuous-integration/fake-ci", "failure") self.push_events([ ("status", {"state": "failure"}), ]) self.push_events([ ("check_run", {"check_run": {"conclusion": "cancelled"}}), ("check_suite", {"check_suite": {"conclusion": "cancelled"}}), ], ordered=False) # Should got to the next PR run_smart_strict_workflow_periodic_task() self.push_events([ ("pull_request", {"action": "synchronize"}), # Merge ("check_run", {"check_run": {"conclusion": None}}), # Summary ("check_run", {"check_run": {"conclusion": "success"}}), # Merge ("check_run", {"check_run": {"conclusion": "success"}}), ], ordered=False) p3 = self.r_o_admin.get_pull(p3.number) commits3 = list(p3.get_commits()) self.assertIn("Merge branch 'master' into 'fork/pr", commits3[-1].commit.message) 
self.create_status(p3, "continuous-integration/fake-ci", "success") self.push_events([ ("status", {"state": "success"}), ("check_run", {"check_run": {"conclusion": "success"}}), ("pull_request", {"action": "closed"}), ]) self.push_events([ ("check_run", {"check_run": {"conclusion": "success"}}), ("check_suite", {"action": "completed"}), ], ordered=False) self.push_events([ ("check_suite", {"action": "requested"}), ]) master_sha = self.r_o_admin.get_commits()[0].sha self.assertNotEqual(previous_master_sha, master_sha) pulls = list(self.r_o_admin.get_pulls()) self.assertEqual(1, len(pulls)) def test_teams(self): rules = {'pull_request_rules': [ {"name": "Merge on master", "conditions": [ "base=master", "status-success=continuous-integration/fake-ci", "approved-reviews-by=@mergifyio-testing/testing", ], "actions": { "merge": {"method": "rebase"} }}, ]} self.setup_repo(yaml.dump(rules)) p, commits = self.create_pr() pull = mergify_pull.MergifyPull.from_raw(config.INSTALLATION_ID, config.MAIN_TOKEN, p.raw_data) logins = pull.resolve_teams(["user", "@mergifyio-testing/testing", "@unknown/team", "@invalid/team/break-here"]) assert sorted(logins) == sorted(["user", "@unknown/team", "@invalid/team/break-here", "sileht", "mergify-test1"]) def test_rebase(self): rules = {'pull_request_rules': [ {"name": "Merge on master", "conditions": [ "base=master", "status-success=continuous-integration/fake-ci", "#approved-reviews-by>=1", ], "actions": { "merge": {"method": "rebase"} }}, ]} self.setup_repo(yaml.dump(rules)) p2, commits = self.create_pr() self.create_status_and_push_event(p2) self.push_events([ ("check_run", {"check_run": {"conclusion": "success"}}), ]) self.create_review_and_push_event(p2, commits[0]) self.push_events(MERGE_EVENTS, ordered=False) pulls = list(self.r_o_admin.get_pulls(state="all")) self.assertEqual(1, len(pulls)) self.assertEqual(1, pulls[0].number) self.assertEqual(True, pulls[0].merged) self.assertEqual("closed", pulls[0].state) def 
test_merge_branch_protection_ci(self): rules = {'pull_request_rules': [ {"name": "merge", "conditions": [ "base=master", ], "actions": { "merge": {} }}, ]} self.setup_repo(yaml.dump(rules)) # Check policy of that branch is the expected one rule = { "protection": { "required_status_checks": { "strict": False, "contexts": ["continuous-integration/fake-ci"], }, "required_pull_request_reviews": None, "restrictions": None, "enforce_admins": False, } } self.branch_protection_protect("master", rule) p, _ = self.create_pr() self.push_events([ ("check_run", {"check_run": {"conclusion": "failure"}}), ]) checks = list(check_api.get_checks(p, { "check_name": "Mergify — Rule: merge (merge)"})) self.assertEqual("failure", checks[0].conclusion) self.assertIn("Branch protection settings are blocking " "automatic merging", checks[0].output['title']) def test_merge_branch_protection_strict(self): rules = {'pull_request_rules': [ {"name": "merge", "conditions": [ "base=master", "status-success=continuous-integration/fake-ci", ], "actions": { "merge": {} }}, ]} self.setup_repo(yaml.dump(rules)) # Check policy of that branch is the expected one rule = { "protection": { "required_status_checks": { "strict": True, "contexts": ["continuous-integration/fake-ci"], }, "required_pull_request_reviews": None, "restrictions": None, "enforce_admins": False, } } p1, _ = self.create_pr() p2, _ = self.create_pr() p1.merge() self.branch_protection_protect("master", rule) self.push_events([ ("pull_request", {"action": "closed"}), ("check_suite", {"action": "requested"}), ], ordered=False) self.create_status_and_push_event(p2) self.push_events([ # Summary ("check_run", {"check_run": {"conclusion": "success"}}), # FIXME(sileht): Why twice ?? 
# Merge ("check_run", {"check_run": {"conclusion": "failure"}}), # Merge ("check_run", {"check_run": {"conclusion": "failure"}}), ("check_suite", {"action": "completed"}), ], ordered=False) checks = list(check_api.get_checks(p2, { "check_name": "Mergify — Rule: merge (merge)"})) self.assertEqual("failure", checks[0].conclusion) self.assertIn("Branch protection setting 'strict' conflicts with " "Mergify configuration", checks[0].output['title']) def _init_test_refresh(self): rules = {'pull_request_rules': []} self.setup_repo(yaml.dump(rules)) p1, commits1 = self.create_pr() p2, commits2 = self.create_pr() rules = {'pull_request_rules': [ {"name": "automerge", "conditions": ["label!=wip"], "actions": {"merge": {}}}, ]} self.git("checkout", "master") with open(self.git.tmp + "/.mergify.yml", "w") as f: f.write(yaml.dump(rules)) self.git("add", ".mergify.yml") self.git("commit", "--no-edit", "-m", "automerge everything") self.git("push", "--quiet", "main", "master") pulls = list(self.r_o_admin.get_pulls()) self.assertEqual(2, len(pulls)) return p1, p2 def test_refresh_pull(self): p1, p2 = self._init_test_refresh() self.app.post("/refresh/%s/pull/%s" % ( p1.base.repo.full_name, p1.number), headers={"X-Hub-Signature": "sha1=" + base.FAKE_HMAC}) self.app.post("/refresh/%s/pull/%s" % ( p2.base.repo.full_name, p2.number), headers={"X-Hub-Signature": "sha1=" + base.FAKE_HMAC}) pulls = list(self.r_o_admin.get_pulls()) self.assertEqual(0, len(pulls)) def test_refresh_branch(self): p1, p2 = self._init_test_refresh() self.app.post("/refresh/%s/branch/master" % ( p1.base.repo.full_name), headers={"X-Hub-Signature": "sha1=" + base.FAKE_HMAC}) pulls = list(self.r_o_admin.get_pulls()) self.assertEqual(0, len(pulls)) def test_refresh_repo(self): p1, p2 = self._init_test_refresh() self.app.post("/refresh/%s/full" % ( p1.base.repo.full_name), headers={"X-Hub-Signature": "sha1=" + base.FAKE_HMAC}) pulls = list(self.r_o_admin.get_pulls()) self.assertEqual(0, len(pulls)) def 
test_refresh_all(self): p1, p2 = self._init_test_refresh() self.app.post("/refresh", headers={"X-Hub-Signature": "sha1=" + base.FAKE_HMAC}) pulls = list(self.r_o_admin.get_pulls()) self.assertEqual(0, len(pulls)) def test_change_mergify_yml(self): rules = {'pull_request_rules': []} self.setup_repo(yaml.dump(rules)) rules["pull_request_rules"].append( {"name": "foobar", "conditions": ["label!=wip"], "actions": {"merge": {}}} ) p1, commits1 = self.create_pr(files={".mergify.yml": yaml.dump(rules)}) checks = list(check_api.get_checks(p1)) assert len(checks) == 2 assert checks[0].name == ("Mergify — disabled due to configuration " "change") assert checks[1].name == "Mergify — future config checker"
34.652733
79
0.518079
3,267
32,331
4.913682
0.105601
0.062792
0.050209
0.061795
0.773999
0.73899
0.700305
0.674017
0.646795
0.613343
0
0.010229
0.322693
32,331
932
80
34.689914
0.722577
0.05023
0
0.704545
0
0
0.248253
0.02946
0
0
0
0.001073
0.078125
1
0.03267
false
0
0.012784
0
0.048295
0
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
e6c98b1de0e8ef1dc433b8fa89fe229a4d2d6c33
3,064
py
Python
pyscf/dmrgscf/__init__.py
nmardirossian/pyscf
57c8912dcfcc1157a822feede63df54ed1067115
[ "BSD-2-Clause" ]
1
2018-05-02T19:55:30.000Z
2018-05-02T19:55:30.000Z
pyscf/dmrgscf/__init__.py
nmardirossian/pyscf
57c8912dcfcc1157a822feede63df54ed1067115
[ "BSD-2-Clause" ]
null
null
null
pyscf/dmrgscf/__init__.py
nmardirossian/pyscf
57c8912dcfcc1157a822feede63df54ed1067115
[ "BSD-2-Clause" ]
1
2018-12-06T03:10:50.000Z
2018-12-06T03:10:50.000Z
#!/usr/bin/env python # # Author: Qiming Sun <osirpt.sun@gmail.com> # '''DMRG program interface. There are two DMRG program interfaces available: * `Block <https://github.com/sanshar/Block>`_ interface provided the features including the DMRG-CASCI, the 1-step and 2-step DMRG-CASSCF, second order pertubation for dynamic correlation. 1-, 2- and 3-particle density matrices. * `CheMPS2 <https://github.com/SebWouters/CheMPS2>`_ interface provided the DMRG-CASCI and 2-step DMRG-CASSCF. Simple usage:: >>> from pyscf import gto, scf, mcscf, dmrgscf, mrpt >>> mol = gto.M(atom='C 0 0 0; C 0 0 1', basis='631g') >>> mf = scf.RHF(mol).run() >>> mc = dmrgscf.DMRGSCF(mf, 4, 4) >>> mc.kernel() -75.3374492511669 >>> mrpt.NEVPT(mc).compress_approx().kernel() -0.10474250075684 >>> mc = mcscf.CASSCF(mf, 4, 4) >>> mc.fcisolver = dmrgscf.CheMPS2(mol) >>> mc.kernel() -75.3374492511669 Note a few configurations in ``/path/to/dmrgscf/settings.py`` needs to be made before using the DMRG interface code. Block ----- :class:`DMRGCI` is the main object to hold Block input parameters and results. :func:`DMRGSCF` is a shortcut function quickly setup DMRG-CASSCF calculation. :func:`compress_approx` initializes the compressed MPS perturber for NEVPT2 calculation. In DMRGCI object, you can set the following attributes to control Block program: outputlevel : int Noise level for Block program output. maxIter : int Max DMRG sweeps approx_maxIter : int To control the DMRG-CASSCF approximate DMRG solver accuracy. twodot_to_onedot : int When to switch from two-dot algroithm to one-dot algroithm. nroots : int Number of states in the same irreducible representation to compute. weights : list of floats Use this attribute with "nroots" attribute to set state-average calculation. restart : bool To control whether to restart a DMRG calculation. tol : float DMRG convergence tolerence maxM : int Bond dimension scheduleSweeps, scheduleMaxMs, scheduleTols, scheduleNoises : list DMRG sweep scheduler. 
See also Block documentation wfnsym : str or int Wave function irrep label or irrep ID orbsym : list of int irrep IDs of each orbital groupname : str groupname, orbsym together can control whether to employ symmetry in the calculation. "groupname = None and orbsym = []" requires the Block program using C1 symmetry. CheMPS2 ------- In :class:`CheMPS2`, DMRG calculation can be controlled by: | wfn_irrep | dmrg_states | dmrg_noise | dmrg_e_convergence | dmrg_noise_factor | dmrg_maxiter_noise | dmrg_maxiter_silent See http://sebwouters.github.io/CheMPS2/index.html for more detail usages of these keywords. ''' from pyscf.dmrgscf import dmrgci from pyscf.dmrgscf.dmrgci import DMRGCI, DMRGSCF, dryrun from pyscf.dmrgscf import chemps2 from pyscf.dmrgscf.chemps2 import CheMPS2
31.587629
84
0.696149
413
3,064
5.121065
0.491525
0.021277
0.03026
0.011348
0.017021
0
0
0
0
0
0
0.031368
0.219648
3,064
96
85
31.916667
0.8532
0.939621
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
e6e04fb65024449204029c3c7e0f5cd074a82d59
2,152
py
Python
src/Completed/deque.py
ChrisAnthonyHudson/data-structures
5a69800387aab5fe46120a9a1b34d6389a8fc852
[ "MIT" ]
null
null
null
src/Completed/deque.py
ChrisAnthonyHudson/data-structures
5a69800387aab5fe46120a9a1b34d6389a8fc852
[ "MIT" ]
null
null
null
src/Completed/deque.py
ChrisAnthonyHudson/data-structures
5a69800387aab5fe46120a9a1b34d6389a8fc852
[ "MIT" ]
null
null
null
"""Implementation of a dequeue data structure.""" from doubly_linked_list import DoublyLinkedList class Deque(object): """Set attributes and methods of Dequeue object.""" def __init__(self): """Initialize Dequeue using LinkedList.""" self._doubly_linked_list = DoublyLinkedList() self.head = self._doubly_linked_list.head self.tail = self._doubly_linked_list.tail def append(self, val): """Add value to the front of a Dequeue.""" self._doubly_linked_list.append(val) self.head = self._doubly_linked_list.head self.tail = self._doubly_linked_list.tail def appendleft(self, val): """Add value to the back of a Dequeue.""" self._doubly_linked_list.push(val) self.head = self._doubly_linked_list.head self.tail = self._doubly_linked_list.tail def pop(self): """Remove the back of the Dequeue.""" if not self._doubly_linked_list.tail: raise IndexError('The Deque is empty.') popped = self._doubly_linked_list.shift() self.tail = self._doubly_linked_list.tail self.head = self._doubly_linked_list.head return popped def popleft(self): """Remove the front of the Dequeue.""" if not self._doubly_linked_list.head: raise IndexError('The Deque is empty.') popped = self._doubly_linked_list.pop() self.head = self._doubly_linked_list.head self.tail = self._doubly_linked_list.tail return popped def peek(self): """Return the Dequeue tail.""" if self._doubly_linked_list.tail is None: return None return self._doubly_linked_list.tail.val def peekleft(self): """Return the Dequeue head.""" if self._doubly_linked_list.head is None: return None return self._doubly_linked_list.head.val def size(self): """Return the size of the queue.""" return self._doubly_linked_list._length def __len__(self): """Return the size of Dequeue, overwriting len method.""" return self._doubly_linked_list._length
34.15873
65
0.651952
280
2,152
4.721429
0.207143
0.217852
0.290469
0.347958
0.661876
0.581694
0.503026
0.409228
0.409228
0.289713
0
0
0.253253
2,152
62
66
34.709677
0.822651
0.184015
0
0.45
0
0
0.022353
0
0
0
0
0
0
1
0.225
false
0
0.025
0
0.475
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
e6e0b63bfc6ec27f79529733de7c06c25fff42d7
193
py
Python
08 Other Common Tasks/importing_4.py
gitter-badger/survival-python
c9c7f336ecd0b8196934386d334f53cd79cb7284
[ "MIT" ]
1
2020-12-19T04:41:50.000Z
2020-12-19T04:41:50.000Z
08 Other Common Tasks/importing_4.py
gitter-badger/survival-python
c9c7f336ecd0b8196934386d334f53cd79cb7284
[ "MIT" ]
null
null
null
08 Other Common Tasks/importing_4.py
gitter-badger/survival-python
c9c7f336ecd0b8196934386d334f53cd79cb7284
[ "MIT" ]
3
2020-01-08T00:34:24.000Z
2021-07-08T23:06:52.000Z
from importing_3 import teacher_comment_with_grade

# Grade book mapping each student's name to their numeric score.
students = {'Tony': 95, 'Betsy': 85, 'Peter': 75}

# Print the imported teacher comment for every student in the book.
for name, grade in students.items():
    print(teacher_comment_with_grade(grade, name))
32.166667
65
0.766839
27
193
5.222222
0.703704
0.198582
0.255319
0.326241
0.439716
0
0
0
0
0
0
0.041176
0.119171
193
5
66
38.6
0.788235
0
0
0
0
0
0.072539
0
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0.25
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
e6e404ef1191ab6c918cc7bf3b118561cd94d2df
258
py
Python
pip_services3_expressions-3.3.4/pip_services3_expressions/tokenizers/ICommentState.py
pip-services3-python/pip-services3-expressions-python
4ea237fbbba32e62f920e6be3bd48e6cc02184e5
[ "MIT" ]
null
null
null
pip_services3_expressions-3.3.4/pip_services3_expressions/tokenizers/ICommentState.py
pip-services3-python/pip-services3-expressions-python
4ea237fbbba32e62f920e6be3bd48e6cc02184e5
[ "MIT" ]
null
null
null
pip_services3_expressions-3.3.4/pip_services3_expressions/tokenizers/ICommentState.py
pip-services3-python/pip-services3-expressions-python
4ea237fbbba32e62f920e6be3bd48e6cc02184e5
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- from abc import ABC from pip_services3_expressions.tokenizers.ITokenizerState import ITokenizerState class ICommentState(ITokenizerState, ABC): """ Defines an interface for tokenizer state that processes comments. """
21.5
80
0.748062
28
258
6.821429
0.785714
0
0
0
0
0
0
0
0
0
0
0.009302
0.166667
258
11
81
23.454545
0.87907
0.341085
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
e6e59312cd0496a70576a4bef68fc925d6b7716a
95
py
Python
napari_clio_test/_tests/test_writer.py
DocSavage/napari-clio-test
7523374a2ae41549a220fd9a3c2973b94780653d
[ "Apache-2.0" ]
null
null
null
napari_clio_test/_tests/test_writer.py
DocSavage/napari-clio-test
7523374a2ae41549a220fd9a3c2973b94780653d
[ "Apache-2.0" ]
null
null
null
napari_clio_test/_tests/test_writer.py
DocSavage/napari-clio-test
7523374a2ae41549a220fd9a3c2973b94780653d
[ "Apache-2.0" ]
null
null
null
# from napari_clio_test import napari_get_writer, napari_write_image # add your tests here...
23.75
68
0.810526
15
95
4.733333
0.866667
0
0
0
0
0
0
0
0
0
0
0
0.126316
95
3
69
31.666667
0.855422
0.936842
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
e6e9a68059650ada6021a3eb3a4f68c7fe0c54eb
61
py
Python
pdf-file-manager.py
firminoneto11/pdf-file-manager
ede3bda9933fda5977017f6de2dcccf41027d47e
[ "MIT" ]
null
null
null
pdf-file-manager.py
firminoneto11/pdf-file-manager
ede3bda9933fda5977017f6de2dcccf41027d47e
[ "MIT" ]
null
null
null
pdf-file-manager.py
firminoneto11/pdf-file-manager
ede3bda9933fda5977017f6de2dcccf41027d47e
[ "MIT" ]
null
null
null
from main_foo import main # Executando a função main main()
12.2
26
0.770492
10
61
4.6
0.7
0
0
0
0
0
0
0
0
0
0
0
0.180328
61
4
27
15.25
0.92
0.393443
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
e6ede2307bb46748a020075e6b138ed6fc59e70d
74,344
py
Python
team61.py
anuragxel/ultimate-tic-tac-toe
69cdfce1a2cfcd028f1a4467066a69b8ce5216c4
[ "MIT" ]
null
null
null
team61.py
anuragxel/ultimate-tic-tac-toe
69cdfce1a2cfcd028f1a4467066a69b8ce5216c4
[ "MIT" ]
null
null
null
team61.py
anuragxel/ultimate-tic-tac-toe
69cdfce1a2cfcd028f1a4467066a69b8ce5216c4
[ "MIT" ]
null
null
null
import random import json import datetime import signal ENFORCED_TIME = 5 heuristic_table = {"-ox--o-xo": 0, "-xoooxx-o": 0, "-ox-x-oxo": 0, "xo-ooxx--": 7, "oxox-----": 4, "----xx-oo": 6, "ox--xooox": 3, "xx-o-ooxo": 2, "-xo-xoo--": 8, "-oxxo-o-x": 5, "-x--ooxo-": 3, "oox-o--xx": 5, "oxooxox-x": 7, "x--ooxxoo": 1, "oxx-o----": 8, "--x-o-x-o": 0, "xo--xooxo": 2, "o-xoo--xx": 6, "-ooxoox-x": 0, "-xxoo-xo-": 5, "----xoxoo": 2, "----x---o": 3, "oxx-xoo--": 7, "-x-oxo-ox": 6, "xox-x--oo": 6, "x--o-o---": 4, "xxo-ox-o-": 6, "-ox--o-x-": 6, "-xoooxx--": 8, "oox-o--x-": 8, "-x--ooxox": 3, "oxooxox--": 7, "----o-xo-": 1, "xxox-o-o-": 6, "----ox---": 8, "-oo-x-x--": 0, "x-xo-oox-": 4, "oxo----ox": 3, "xo-o-oxx-": 4, "-x-oxo-o-": 2, "x--ooxxo-": 1, "------oox": 2, "xo--xox-o": 3, "xo--xoox-": 8, "-o---x---": 2, "-o-xo-xxo": 0, "ooxoxx---": 6, "----xoxo-": 2, "xo-oxo-xo": 2, "o--o-x-x-": 6, "oxx-xoo-o": 7, "o---xx--o": 1, "ox-x-o-xo": 4, "-ox--o---": 3, "-o-x-o---": 2, "-o-oxo-x-": 6, "o-xx-ooxo": 4, "-oo----xx": 6, "xoo--xo-x": 4, "---oxo---": 8, "-xo--xxoo": 3, "-x-o--oox": 0, "oo--x--x-": 2, "---x-o--o": 2, "x-xxoo--o": 1, "-o-xxoxoo": 0, "x-xo-ox-o": 1, "o--xo-oxx": 2, "ooxoox-x-": 8, "oxoo---xx": 4, "o-----xox": 2, "o-ox----x": 1, "xoxxxo-oo": 6, "xox-ooox-": 3, "xo-xxo-o-": 8, "---oxo--x": 7, "ox--x-xoo": 2, "oo-oxoxx-": 8, "-o-ox-oxx": 0, "--xoo--ox": 5, "x-oxox-o-": 6, "-x--xo-o-": 0, "-o-oxo-xx": 6, "-ox--o--x": 6, "oo--x--xo": 2, "--oxo-x--": 8, "-xo---x-o": 5, "-oo----x-": 0, "xoxo-ox--": 4, "---x-o---": 7, "-o-xxoxo-": 2, "xox-oooxx": 3, "o-o-xxoox": 3, "o--x-o-ox": 1, "o-----xo-": 1, "xooo-x-xo": 4, "o---xxxoo": 1, "oxo-xxo--": 3, "ooxx-oxox": 4, "----oxxo-": 1, "xo-o-----": 4, "-x-oo-x--": 5, "--oxx-o-o": 5, "--oooxxx-": 8, "--ooxo-x-": 8, "-xooxoxox": 0, "xoxoxo-xo": 6, "-o--x-x-o": 3, "xx---ooox": 3, "ox-x--xoo": 4, "-o-xoooxx": 2, "-xx-o-oxo": 0, "ox-x-oo-x": 7, "xo-oxo-x-": 2, "x--xoo---": 6, "-oxoo-oxx": 5, "-x-ox--oo": 6, "x-ooxxo--": 8, "oxx---oxo": 4, 
"o-x---ox-": 3, "ooxx-x-o-": 4, "---x-ox-o": 0, "ooxoxo-x-": 6, "xo-o-o-xx": 6, "-o-----xo": 0, "-x--o----": 2, "-xooxoxo-": 8, "--oxx-o--": 0, "xo-xxooo-": 8, "x-ox-oo-x": 4, "oxo-o-xx-": 8, "oox-x--ox": 6, "-o-x-o-ox": 4, "-o-x-oxxo": 2, "-----oxo-": 0, "xo-x-oox-": 8, "--o--xoox": 4, "-oooox-xx": 6, "x--xoo--o": 6, "ox-xoox--": 8, "x---xo--o": 2, "oxooo-xx-": 8, "-x-ox--o-": 0, "x-ooxxo-o": 7, "--xoxxoo-": 8, "x-o-ox-xo": 6, "-x---x-oo": 6, "---xooo-x": 2, "-xxxo-oo-": 8, "---oox-ox": 2, "xo-o-x---": 8, "oox--o-xx": 6, "ox--oxo-x": 2, "-x-x-oo--": 2, "oxx--o--o": 4, "--xooxx-o": 0, "ooxx-xoxo": 4, "oxooxo--x": 7, "o--xxo-o-": 2, "-x-oxo---": 0, "x-o-oox-x": 7, "oxo-oxxo-": 8, "o---x-xo-": 3, "o---x--ox": 5, "-x-xo-oox": 2, "---xo----": 0, "--o--x-ox": 4, "-o-xxo-xo": 2, "oxxxxoo-o": 7, "-oox---x-": 0, "--xox-oxo": 1, "-xo--o--x": 4, "oxoox-xo-": 5, "xo------o": 6, "-ooooxx-x": 7, "oox--o-x-": 6, "xo-o-x--o": 7, "oxoxxo-o-": 8, "xxo-o----": 6, "ooxox---x": 6, "oxooxxxo-": 8, "o--xoo-xx": 6, "-xo-xoo-x": 7, "x-o-oxxo-": 3, "oxoxoo-xx": 6, "oxx--o---": 4, "-xoo--oxx": 4, "o---x-xoo": 3, "o-oo-xxx-": 8, "--oxoxxo-": 0, "oxoox-xox": 5, "xoo-oo-xx": 6, "oxo-oxxox": 3, "x-x-oxo-o": 1, "ooxxx---o": 7, "---xxo-o-": 0, "ooxoxx--o": 6, "xo-------": 4, "-xxo--oox": 5, "xox-xoo-o": 7, "xoxooxo--": 8, "---x-oox-": 4, "-x-o-----": 4, "x-xoo--xo": 1, "xx-oox-o-": 2, "-ox-xo-xo": 6, "-o----ox-": 2, "oxox-x-o-": 4, "---o---ox": 2, "xo-x--o--": 4, "-o-xoox--": 7, "xox-o--xo": 5, "-xxo-o---": 0, "xo--xoxo-": 2, "--ox--xoo": 0, "x---xooxo": 1, "x-x---o-o": 1, "xox-o----": 7, "o-oxo--xx": 6, "x-ooo-x--": 5, "-o----oxx": 2, "xox-xoo--": 8, "x-oxx-oo-": 8, "ox-oxx--o": 6, "xx-oox-oo": 2, "oxox-o-ox": 4, "oxx--o-xo": 4, "xoxo-xxoo": 4, "-x-xooox-": 2, "-o-xoox-x": 7, "---ox-oox": 0, "-xx-oo-xo": 0, "x-x--ooxo": 1, "o---x-oxo": 1, "--xoxx-oo": 6, "oox--o--x": 6, "oox-oxx--": 8, "-xxo-o--o": 0, "--ox--xo-": 5, "x-o-ox---": 6, "xox-o---o": 7, "-o--oxoxx": 2, "---ooxx-o": 0, "--o---xo-": 1, 
"-xxoxoo--": 7, "oo-o--x-x": 7, "xx-ooxo--": 2, "ox----oox": 3, "-o-x-xxoo": 0, "-o--o-xxo": 0, "xo-ooxox-": 2, "xox-oox-o": 3, "xoxox-oo-": 8, "xo-ox--xo": 5, "-oxox----": 5, "o--xoxoox": 2, "-xxoo-xoo": 0, "o-xx-oo-x": 7, "ox-ox----": 6, "oox-xo---": 6, "ox-oo-x-x": 7, "x--x-o-o-": 6, "oxx-x-o-o": 7, "oox-ox-x-": 8, "-xoo---ox": 4, "ox-xo-oox": 2, "-xxoxoo-o": 7, "o-x-ooxx-": 8, "-xoox-oox": 0, "ox--o--ox": 5, "-o--o-xx-": 0, "oxxooxxo-": 8, "--o---xox": 0, "x---xoxoo": 2, "x-o----ox": 4, "x--oxo-xo": 1, "x-x-xo-oo": 6, "o-xx-oo--": 8, "xx-o-oo-x": 4, "o-xxxo--o": 1, "-oxox---o": 6, "-ooxxo--x": 0, "oxxoo-xox": 5, "--o-x----": 7, "ox-ox---o": 7, "oox-xo-x-": 6, "o-xxooo-x": 1, "--xox-o--": 0, "-xx-o-xoo": 0, "x-o--xo--": 4, "oo--xoxx-": 8, "-xx--o--o": 6, "ox-o-x---": 6, "ox-xxo-o-": 6, "----xoo--": 8, "o-xxoxo--": 8, "-xx-ooxoo": 0, "x-x-xooo-": 1, "xoo---x--": 5, "xox---oxo": 4, "xo-o--xxo": 2, "xx-xoooox": 2, "--o-oxxox": 1, "o-xooxxo-": 8, "--oo-xx-o": 4, "x-o-o--x-": 6, "xox-oo-x-": 3, "xo-oxx--o": 7, "xooo-xx-o": 7, "oox-x-oox": 5, "x--ooxox-": 2, "--o-xx-oo": 3, "--oxx-xoo": 5, "-oxxo----": 7, "oox-xo-xo": 6, "xoxoox-xo": 6, "-xooox-x-": 6, "xoxoxx-oo": 6, "oo-oxxxxo": 2, "o-xo--xox": 4, "xooxxoox-": 8, "-oxox-o-x": 0, "-o-x--o-x": 4, "oo-x---x-": 2, "x-oo-xoox": 4, "ooxo---x-": 6, "-oxo-x-xo": 0, "xo-xxo-oo": 6, "x-ooxo-x-": 8, "oo-o-xxx-": 8, "--oo-xx--": 8, "o-x----ox": 6, "xoooox-xx": 6, "x--ooxoxo": 2, "x-o-x-o--": 7, "--o-xx-o-": 0, "xx-xoooo-": 2, "x--oxxoo-": 8, "-o--oox-x": 7, "ox-ox-x-o": 5, "--ooxx-o-": 0, "x-ooox--x": 6, "---xo-x-o": 0, "--xoxo-xo": 6, "xx-oxooo-": 2, "x-xo---o-": 4, "--o----x-": 8, "--o-x-ox-": 0, "---ox--xo": 1, "x-xoo----": 5, "oxox-ooxx": 4, "-oxooxx--": 8, "oo--o-xx-": 8, "x-ox-x-oo": 6, "xo--xoxoo": 3, "x--o---o-": 2, "-o-ox-xo-": 8, "oo-ox-xx-": 8, "xx--o---o": 6, "xoxoo--xo": 5, "xoooxxxo-": 8, "ooxx-ox--": 4, "o-o--xx--": 1, "xxoxoo---": 6, "-xox---o-": 6, "oo-xx--ox": 2, "oo-x----x": 2, "oxoxo---x": 6, "---xo--xo": 0, 
"x-----oxo": 1, "--o-x-oxo": 1, "x-xo---oo": 6, "x-xooxo-o": 1, "-o--xoo-x": 0, "-xo--oxo-": 8, "-oox-o--x": 0, "x-ooox---": 6, "--ooxx-ox": 0, "x-xoo---o": 1, "xx-oo-oox": 2, "--oooxxox": 1, "xx-o-oxo-": 4, "ox-x-o---": 7, "--o-xo-x-": 8, "--o--o--x": 6, "oo-ox-xxo": 2, "x-x-ooxo-": 1, "x----o-xo": 2, "xxoo-xo--": 4, "-o-ox-xox": 2, "ox---o-ox": 4, "xox--o-o-": 4, "o-o-oxxx-": 8, "-oo-xx-o-": 3, "xoo--oxox": 3, "ox-----ox": 3, "xoo-o--xx": 6, "x-x-ooo-x": 1, "xoxoo-xxo": 5, "-x-o-x--o": 0, "xooox-xxo": 5, "x-ooox-x-": 6, "-xo-x-o-o": 7, "xx-oxo-oo": 2, "--x--o-xo": 3, "-xoox--ox": 0, "oo--xxo--": 3, "-xo-oxxo-": 8, "oxo-xxo-o": 7, "xo-o--ox-": 5, "-ooo-xx-x": 7, "oxo-xxxoo": 3, "--x-ooo-x": 3, "xooxox---": 6, "-x--oxo--": 2, "xox-ox--o": 7, "xxo--ooox": 4, "ox-x----o": 4, "oo----xx-": 2, "-xox-o---": 8, "xxoox---o": 7, "-o---xx-o": 4, "xo-xo--xo": 6, "-oo-xx-ox": 0, "ox-----o-": 6, "x----ox-o": 2, "x-xo-o-o-": 1, "-x-oxooox": 0, "xx-oxo-o-": 8, "-xo-x-o--": 5, "xoo-o--x-": 6, "o-o-o-x-x": 7, "-oxo-x---": 8, "--o--oxox": 0, "o--x-xxoo": 4, "--xoox-o-": 8, "-xooxoo-x": 7, "xooxox--o": 6, "o-o----xx": 1, "oo--xxo-x": 3, "---o--xxo": 4, "o-ox-xoox": 4, "x-o--xoxo": 4, "o-xxoo--x": 1, "--oox--xo": 1, "o---o---x": 6, "ooxx--oxo": 4, "xoo-xxo--": 8, "--x--o---": 4, "-o-xo--x-": 6, "-oxoxxoxo": 0, "xoo-x----": 3, "--ox-oxox": 0, "xoooo--xx": 6, "x--oo-x-o": 5, "o-ooxxx-o": 1, "--xx--oo-": 8, "x--ox-o-o": 7, "-xxxooxoo": 0, "--oxx--o-": 0, "x-xoxo--o": 7, "x---o--o-": 1, "---ooxoxx": 2, "o-x-xxo-o": 3, "ox--x-o-o": 7, "oxxo--xo-": 5, "-oxoxooxx": 0, "-o----x--": 4, "--x---xoo": 0, "xx-o----o": 4, "--oox--x-": 6, "oxx--oxo-": 4, "oxo--xx-o": 4, "o-xxoo---": 8, "--x--o--o": 0, "o-xoo---x": 5, "xo-ox-xo-": 5, "-oo--x-x-": 0, "o--oxx-o-": 6, "oxoox--ox": 6, "xxooo-x--": 5, "xoo-x---o": 5, "x--oo-x--": 5, "o-ooxxx--": 1, "o--o-xxxo": 4, "x---o--ox": 1, "xx-oxo--o": 7, "x--ox-o--": 7, "oox-xxoxo": 3, "o-o-x--ox": 1, "-oxoxoox-": 0, "-oo-xxx-o": 3, "xo--x-oo-": 8, "oxxo--xoo": 4, 
"ox-x-xoo-": 8, "-o---xo-x": 4, "-ooxx-o-x": 0, "xoox-oo-x": 4, "oox-x-ox-": 3, "-oxo---x-": 6, "oo-xxo-x-": 2, "-o-x-ox--": 2, "xo-oxox-o": 2, "ooxxo--x-": 8, "x-xo-o---": 1, "xoo--xoxo": 4, "-ox-o-o-x": 5, "oxox-xoox": 4, "xox--ox-o": 4, "oox-x----": 3, "---xx-o-o": 5, "o-x-xo-ox": 6, "--o-ox-x-": 6, "x-xx-ooo-": 8, "---o--x-o": 5, "xx--oo-ox": 2, "x--oxox-o": 2, "-ox-o-ox-": 8, "-o-x-o--x": 0, "x-oox-oxo": 1, "ooxo-x-x-": 8, "-ooxx-o--": 5, "-o-o--xox": 4, "o---o--xx": 5, "-oo-o-xx-": 0, "-oo-xoxx-": 8, "xo---xoxo": 4, "-oxo---xo": 0, "oox-x-oxo": 3, "x-xo-o-xo": 1, "--xo--ox-": 0, "xo-oxox--": 8, "x-oox-xoo": 5, "o--x--x-o": 4, "xoo--xox-": 4, "-ox-o-oxx": 5, "-xooo-xox": 5, "---o-xo--": 0, "o--o-xxo-": 2, "o-xox----": 6, "x-oox-ox-": 1, "---ooxx--": 8, "--o-x--o-": 6, "----xoo-x": 0, "-x-ox-oox": 0, "-xoo-ox-x": 7, "xx--oox-o": 3, "o-oxo-x-x": 7, "ooxo--x-x": 5, "-oxxooox-": 0, "xx--ox-oo": 2, "xx--oo-o-": 3, "oox--oxox": 4, "-oxx-oox-": 0, "x-xoo-xoo": 1, "xox-oo-xo": 3, "o-xx-o--o": 4, "x-oo-xoxo": 4, "-oxx-oo--": 4, "-o-xx-oox": 0, "-o--x---o": 0, "---o-x-xo": 0, "xoxo-xo--": 8, "--x-----o": 1, "x--o-ox--": 4, "ox-x-x-oo": 4, "-oxox-o--": 0, "-x--xooo-": 8, "xxo-o-x-o": 3, "-ooxx-ox-": 0, "xoooox-x-": 6, "-oxxoooxx": 0, "ox-oox-ox": 2, "--xo--o-x": 5, "o-xooxx--": 8, "--o-x--ox": 5, "-o--xx--o": 3, "o-xxx-oo-": 5, "-oxx-ooxo": 4, "x-xx-oo-o": 1, "x-xoo-xo-": 1, "ox-xo----": 8, "o-xx-o---": 8, "ooxooxx--": 8, "oo-xo--xx": 6, "x-----oox": 5, "-oxoo---x": 5, "-o--x----": 6, "-x--xooox": 0, "-o-xx-oo-": 5, "-oxx-oo-x": 4, "--o-o-xx-": 0, "-oxx-xoo-": 8, "-ooxx-oxo": 5, "--ooxx-xo": 1, "-xx--oo-o": 0, "x-ooxoxo-": 8, "x-ooo--xx": 6, "o-o-xx--o": 1, "-ox--xoxo": 4, "oo--xxx-o": 3, "o--x-oxox": 2, "--o-xo-ox": 0, "xoxox--o-": 8, "x-xo-oo-x": 1, "xo-ooxo-x": 2, "xo-xooox-": 2, "oxo-xox--": 7, "--ox--o-x": 4, "xo---oxox": 4, "x-xxoooxo": 1, "o-----xxo": 4, "-o-x-xoox": 2, "--oo-x-x-": 1, "oxx--ox-o": 4, "-oxoxxoo-": 8, "-o--x-oox": 2, "-------xo": 0, "-xooxx--o": 7, 
"---xo--ox": 1, "o-xx--xoo": 4, "-xx-o-o--": 0, "o-oxo-xx-": 8, "---------": 6, "-o-xooxxo": 0, "--o-x-oox": 0, "-o-ox-xxo": 2, "-xx--oo--": 7, "xo-o-xox-": 8, "-o-oxxx-o": 2, "o-o-xx---": 1, "-oxoxxo--": 8, "-xoox--o-": 8, "ox----o-x": 3, "xoxox--oo": 6, "x-xoxooxo": 1, "-o--xooxx": 0, "-o-oxoxox": 2, "-xxoo----": 0, "o-oxxo--x": 1, "o---x--xo": 6, "xoxo-oo-x": 4, "xx-oo-ox-": 2, "o-x-xo-o-": 6, "ooxx-xo--": 7, "o--oxx-ox": 2, "xo---oxo-": 4, "-oxoxo-ox": 0, "o--xox-ox": 2, "--------o": 4, "-o-xooxx-": 8, "-xx---o-o": 7, "--ooxo--x": 0, "xoox-x-oo": 4, "-xo-o---x": 6, "ooxxxo-o-": 6, "ooxo-xx-o": 4, "o-ooxox-x": 7, "ooxxoxox-": 8, "o--x-ooxx": 1, "o---xxo-o": 3, "xoo-x-xoo": 3, "o-x--x--o": 4, "x--oox-o-": 1, "xxooxoo--": 7, "xoox--o--": 4, "---o-xo-x": 0, "-x-ooxoox": 2, "xoo----x-": 3, "o---oxx--": 8, "oox-oo-xx": 6, "-xx----oo": 0, "-o-oxx-o-": 8, "-oxox--xo": 6, "-x--o-x-o": 0, "-xox-ooox": 4, "---oxx-o-": 2, "-xx--ooxo": 4, "-x-xoooox": 2, "-ooxxox--": 0, "-oxx-o-xo": 6, "xoox-x-o-": 4, "ooxo-xx--": 8, "o-x-oo--x": 3, "---o---x-": 4, "-x-oo---x": 5, "-xxoo--xo": 0, "----x-xoo": 0, "o---xxo--": 3, "-xo-ooxox": 3, "---xxooox": 0, "xx--o-o--": 2, "-xox-oxo-": 0, "o-oooxxx-": 8, "-ooox-xox": 0, "oxxxooo-x": 7, "xx-o-o-ox": 4, "xox-o-o-x": 5, "oxoo-xxxo": 4, "xoo----xo": 5, "xoox--o-x": 4, "x--oox-ox": 2, "------xo-": 4, "xoooxx--o": 7, "o----x-xo": 4, "x-xo--oox": 4, "--xo---ox": 5, "xoxx-o-o-": 6, "----xoox-": 1, "xox-x-oo-": 8, "-o-oo--xx": 6, "oxo-x--o-": 8, "-x-x-ooox": 0, "-xxxooo-o": 0, "x-x-o---o": 1, "----o-oxx": 2, "oxxoxo-o-": 6, "xxo-x-oo-": 8, "x-xo-xo-o": 1, "---oxoxo-": 0, "-oo-x--ox": 0, "ox-xx-o-o": 5, "x-xo--oo-": 8, "xoooxx---": 8, "o-xx--o--": 5, "o---ox-x-": 8, "x-o-oo-xx": 6, "xo--oxox-": 2, "xo-xxooxo": 2, "--x---oxo": 1, "-x----o--": 0, "----xooxo": 1, "oox----ox": 4, "oxxoxo-ox": 6, "x-oxoo-ox": 6, "ox-x---o-": 8, "---oxoxox": 2, "-xxxooo--": 0, "-oxo-oxx-": 4, "oox-oox-x": 7, "xxooox--o": 6, "ox-x-oxo-": 2, "--x-x-o-o": 7, "ox-xxoo-o": 7, 
"x-xo-oxo-": 4, "x--xoo-o-": 6, "xoxoxxoo-": 8, "x--oxooxo": 1, "-o--oxx--": 7, "xox-o-ox-": 5, "xo--oox--": 3, "-oxxxoo-o": 7, "-oxo-xxo-": 8, "xoxx-ooox": 4, "xoxo--oox": 4, "ooxxoxo--": 8, "xooxxoo--": 8, "oxo-xo---": 7, "o-xxo-o-x": 5, "x--oo--x-": 5, "ox-xxo--o": 2, "x-ox--o--": 4, "-xo--oxox": 0, "xoxo-o-xo": 4, "x-o--oxo-": 3, "o-xxxoo--": 7, "x--oo---x": 5, "-xxo-oxo-": 4, "-o-x--xo-": 0, "-xo--x--o": 4, "x-xo-oxoo": 1, "ox-xxoo--": 7, "x--oxoox-": 1, "-o-oox-xx": 2, "ooxx-oxxo": 4, "oxx---oox": 5, "xx-x-ooo-": 8, "oo-xxoxox": 2, "xox-o-oxo": 5, "-oxxxoo--": 0, "-xo--xoox": 4, "xx-o-o---": 4, "xoxxo-oxo": 5, "x--oo----": 5, "xx-----oo": 6, "-oxxxo--o": 6, "x----x-oo": 6, "x---o-o-x": 2, "--oxooxox": 0, "x-xoo-ox-": 1, "xooxx-oo-": 5, "oxo-xo--x": 7, "--oox--ox": 0, "oxo-xx--o": 6, "-o-x--xoo": 0, "x-o--oxox": 4, "x----o---": 2, "xoxoxo---": 6, "-xx-oo-ox": 0, "-xoxoxxoo": 0, "xo-oxo---": 6, "-o-oxooxx": 0, "xo--o--xo": 6, "x--oox-xo": 6, "oo--x-x--": 2, "---ox-x-o": 1, "--o-o-xox": 1, "--oxo-xxo": 0, "---ox---o": 6, "-xooo--xx": 6, "-x-x--oo-": 8, "-ox---o--": 8, "-oxxoox-o": 0, "o--ox-xo-": 2, "xoo--ooxx": 4, "--o-xoox-": 1, "-xo-xxoo-": 3, "--o-xxoo-": 3, "ox-o-oxox": 4, "-o--xoxo-": 3, "-xoxo-xo-": 0, "-----x--o": 0, "o---xoo-x": 3, "ooxxx-o-o": 5, "xoxoxo--o": 6, "--xx-o--o": 0, "-o-oxx-xo": 0, "-xxooxxoo": 0, "oxoooxxx-": 8, "-xx-oo-o-": 0, "-x---o---": 2, "-ooo--x-x": 7, "o-x-x--o-": 6, "-ox-----o": 4, "xo--o--x-": 6, "ox--xxo-o": 3, "----o-xxo": 0, "oxox-xxoo": 4, "ox-oo-xx-": 8, "--o-xxoox": 1, "oxxo-x-o-": 8, "oxo--x---": 4, "xooxx---o": 5, "-oxooxox-": 8, "---ox----": 8, "-oxxoox--": 0, "ooxx-o-xo": 4, "-o--xoxox": 3, "x-x--o--o": 7, "xooxox-xo": 6, "-xoxo-xoo": 0, "xoo-xox--": 3, "-ox---o-x": 4, "ooxxx-o--": 5, "---o-o--x": 4, "oo-x-xoox": 2, "xx-xo--oo": 6, "-o-x-xoxo": 4, "xo----xoo": 4, "o-oox-xxo": 1, "xoxoox--o": 7, "xxo--oox-": 4, "-o--xxo--": 3, "oo--xo--x": 2, "-xoxoo--x": 6, "xooox-x--": 8, "o-xoxo-ox": 6, "o--ox---x": 6, "x--o-x--o": 4, 
"-o-o-x-x-": 8, "-o---ox-x": 4, "xoo-x-oxo": 5, "-ox-x-o--": 5, "xx--ooo-x": 2, "xx--o-oxo": 2, "-xoo-x---": 6, "o--xxooxo": 1, "----xxo-o": 7, "-x---oxoo": 2, "oxx-oo--x": 3, "-x--oxxoo": 0, "xo----xo-": 4, "o-oox-xx-": 1, "--ooo-x-x": 7, "x-o-xoxo-": 8, "xoxoox---": 8, "-oxx--oxo": 0, "-o--x-o-x": 0, "-o--xxo-o": 3, "oo----x-x": 7, "-o-----ox": 4, "-oo-xxoox": 3, "--oo-ox-x": 7, "o-x-oxox-": 8, "x-x-ooox-": 1, "-x-x-o--o": 2, "x-o-x-oxo": 1, "o-xo----x": 6, "xoo-x-ox-": 3, "oxoxx-oox": 5, "-ox-x-o-o": 7, "o-oxoox-x": 7, "o--xxoox-": 1, "ox-oxxxoo": 2, "o--ox----": 6, "-x---oxo-": 2, "--ox-----": 0, "o--oxxx-o": 7, "--o--o-xx": 3, "ooxxooxx-": 8, "o-xx-oxo-": 4, "ooxxxo-ox": 6, "x-oooxxo-": 1, "--x-xoo--": 1, "oxo--ooxx": 4, "xoxxxoo-o": 7, "x-ox---o-": 6, "-xoox-xoo": 5, "--xo-o---": 4, "-xoo-xxo-": 8, "xoooxxo--": 8, "----oox-x": 3, "-xo-o-xox": 3, "xooo--x--": 8, "xoo-x-xo-": 8, "xoo--xx-o": 3, "o--ox-xxo": 1, "ox--o-x--": 8, "oxxxoo---": 8, "o--o-xx-o": 4, "---x--oox": 5, "-ox--x--o": 4, "--x-xooxo": 1, "o-xx-oxoo": 4, "--xo-o--x": 4, "--x-xoo-o": 7, "xoxx-o--o": 6, "o--ox-xox": 2, "-xo-o-xo-": 5, "-xoox-xo-": 5, "xooo--x-x": 5, "-xoo-xxoo": 0, "oo---ox-x": 2, "o--oox-xx": 6, "-o-oxx-ox": 2, "xooo--xx-": 8, "ox-o--x--": 8, "o-oxx--o-": 5, "-o---x-xo": 2, "oo-ox-xox": 2, "ox--xx-oo": 3, "xooxo----": 6, "-oox-x-xo": 0, "-x--o-oox": 2, "xo-o-ox--": 4, "o-xo-xxoo": 4, "o--xxooox": 2, "--xxooxo-": 0, "--xoxoo--": 0, "oxoxxo-ox": 6, "o-ox-o--x": 1, "--x-o-ox-": 8, "-x-xo--o-": 0, "-xo--ooxx": 4, "xoxo--oxo": 4, "xoox---o-": 4, "o-xox--xo": 1, "o-oxx--ox": 5, "xoox-ooxx": 4, "xx--o-oo-": 2, "o-x-x-o--": 3, "--ooxoxx-": 8, "xxoo--o-x": 4, "o-xo-xxo-": 1, "-o-xx--oo": 6, "-xx-xoo-o": 0, "xoo-xx-oo": 3, "oo--xoxox": 2, "o-ox-o-xx": 1, "--xoxoo-x": 0, "o--oxo--x": 6, "xoxo--ox-": 8, "xo-ox-o--": 8, "oxoxx--oo": 5, "x-o---xo-": 8, "oo-xx--o-": 2, "o--xxoo--": 1, "xo-x-xo-o": 4, "xooo-xoxx": 4, "o--ooxxox": 2, "ooxo--xx-": 4, "-x-o-xoox": 2, "oxo-x-o-x": 3, "-oooxo-xx": 6, 
"x-x--oo--": 1, "x-xox-o-o": 1, "-ox-xooox": 0, "-xoo--xo-": 4, "xx-oxoxoo": 2, "ox--xoo-x": 7, "x-o-o-xo-": 3, "--x-o---o": 0, "-oxxoo--x": 7, "-ooo-xx--": 0, "xo-oxxo--": 8, "xx-o-ooox": 4, "o-o-xxx-o": 1, "xox-----o": 6, "-xx--o-o-": 4, "o--xo--x-": 8, "x-o---xoo": 3, "x--ooxoox": 2, "oxo-x-o--": 3, "xoo-xoxo-": 8, "-xo---xo-": 8, "-ooo-x-xx": 6, "o---o-xox": 1, "oxoxoxxo-": 8, "ooxo--xxo": 4, "o-x-x-oxo": 1, "x-x--oo-o": 1, "------xoo": 3, "xooo-xox-": 4, "oo-oxx-ox": 2, "o-x-xooox": 3, "--oox-ox-": 0, "--oxoo-xx": 6, "--x-o----": 1, "---xoo-ox": 1, "-xoo--xox": 5, "---oxo-x-": 0, "oo-xx-oox": 5, "xxooxxo-o": 7, "oox-o-xx-": 8, "xo-oxxo-o": 7, "xo-o-xx-o": 4, "oxoo--xx-": 4, "-o-x---xo": 0, "o----x-ox": 4, "x-o-o-xox": 3, "xooo-ox-x": 4, "xx--o-xoo": 5, "oo-----xx": 6, "xxoox-o-o": 7, "o----xoox": 3, "-oxo--oxx": 0, "x--oxo---": 8, "-o-ox----": 2, "x---o-x-o": 1, "xo-x-oo-x": 4, "x-xxooo-o": 1, "oxx-----o": 4, "ooxox--xo": 6, "--ooox--x": 6, "--xooxox-": 0, "x-oxxoo--": 8, "--xoo-o-x": 5, "oo---x-x-": 2, "oo-xxoox-": 2, "-xxo----o": 6, "-x-xoox-o": 0, "-o-----x-": 8, "-o--xo-x-": 8, "-xxxo--oo": 0, "-o-ox---x": 6, "-oxoxo-xo": 6, "-oxo--ox-": 0, "---oxxoox": 0, "xxoox-o--": 5, "x--oxo--o": 2, "----xo-ox": 6, "xxooxoxo-": 8, "oo-----x-": 2, "-oxx-o---": 6, "x-oox--o-": 8, "ooxox--x-": 6, "xx-ooxoxo": 2, "xxoooxx-o": 7, "-ox-xoox-": 8, "----oxoox": 2, "-o-xo---x": 7, "xox-xooxo": 3, "oxx-ox-o-": 8, "-xo------": 6, "ox--oo-xx": 6, "oox--x-o-": 4, "x-oxox--o": 6, "--x--oo-x": 0, "ox---ox--": 4, "----o-x--": 5, "x---x-o-o": 7, "xoxoo-o-x": 5, "ox--xoxo-": 2, "-o-xxo--o": 2, "-xxoxo-o-": 0, "x--oxoxoo": 2, "xx----oo-": 8, "o-o-xoxx-": 1, "xoox-o-ox": 6, "--oo--x--": 4, "-oo-xxoxo": 3, "-o--x-oxo": 0, "xo-oxxxoo": 2, "o---xxoxo": 3, "--xoxo-o-": 6, "ooxx--ox-": 5, "--oxx--oo": 5, "x-ooxx-o-": 8, "oox-ox---": 8, "ox--xo---": 7, "ox--xoo--": 7, "-xox-oo-x": 4, "xo-o--x--": 8, "--x-ox-o-": 1, "oxo---x--": 7, "----o-x-o": 0, "--oo--x-x": 0, "xo-ooxoxx": 2, "x---oxoox": 2, 
"-o-xxo---": 0, "-xxoxo-oo": 0, "--x--oo--": 4, "x--oxoxo-": 8, "o-xoxx-o-": 8, "-xoxo--ox": 6, "---o-xox-": 0, "--o-oxx--": 0, "----ox-ox": 2, "xox--oxoo": 4, "-ox-xo---": 0, "-oo-xxox-": 0, "ox----xo-": 2, "-o--x-ox-": 5, "o-x-oox-x": 7, "xx-ox-o-o": 2, "-o-x--ox-": 0, "-xx-ooxo-": 0, "-oxo--o-x": 5, "xoo---x-o": 3, "x-ooxx-oo": 6, "-xo-xo---": 7, "--xoxo-ox": 6, "xo-o--x-o": 4, "--oxxoo--": 8, "-ox-ooxx-": 8, "------ox-": 0, "-oxx-oxoo": 4, "o--xx-xoo": 1, "xo--x---o": 3, "xxoo--o--": 4, "o-o--o-xx": 6, "xx--o-oox": 2, "x---oxxoo": 3, "oo-xxoo-x": 2, "-xx--o-oo": 0, "--oox-xxo": 1, "xxo----o-": 6, "x--xoo-ox": 1, "oo-ooxx-x": 7, "-oxoo-xx-": 8, "--o--xo-x": 4, "oox-xxoo-": 8, "-x--xo--o": 7, "oo--x--ox": 2, "-oox-xxo-": 4, "-o--ox-x-": 8, "oox-x---o": 3, "x--ox---o": 2, "-oxx-oxo-": 0, "oxx-o-o-x": 5, "--oxxoo-x": 7, "-x-oxx-oo": 6, "x-ooxxoo-": 8, "-o--xoox-": 0, "-xoxxo-o-": 8, "xoxooxox-": 8, "o-x-o--x-": 8, "xoooo-xx-": 8, "x-xo-oo--": 4, "oo-x-xx-o": 4, "xx-oxoo--": 2, "-oxxoo---": 7, "--xo-oxox": 4, "--oxx-oo-": 5, "oxo-----x": 7, "xx--xo-oo": 2, "o---xoxox": 2, "-ooxx----": 5, "xxooox-o-": 6, "-o-oxxoxo": 0, "oxxx-o--o": 4, "-xoxxoo--": 8, "o-xo-x---": 6, "x-xxooo--": 1, "--xxooo--": 7, "oxx-o--ox": 5, "x-----xoo": 4, "-ox-o----": 7, "-oxx-o--o": 6, "o-o-oxx-x": 7, "oxx-xooox": 3, "-ox-xooxo": 0, "o---xoxo-": 2, "o-oooxx-x": 7, "--xooxxo-": 8, "xxooox-ox": 6, "oxxo---ox": 6, "x--oxx-oo": 6, "-ooxx---o": 5, "--oxx-oox": 0, "x-ox-o---": 8, "x-oo-ox-x": 7, "-x---xoo-": 8, "--o-xooxx": 3, "-ox-o---x": 5, "oxox-oo-x": 4, "-x-ox---o": 2, "oxx-o--o-": 8, "--xxooo-x": 7, "oo--x---x": 2, "xo-xo----": 6, "o-xo-ox-x": 7, "--o-x--xo": 5, "xoxo--x-o": 4, "o-xo-oxx-": 4, "o--o-xx--": 2, "xoox---xo": 5, "--xxo-xoo": 0, "xoo--x---": 3, "oo-o-xx-x": 7, "xxoo--oox": 4, "o--o--x-x": 1, "x-o-o-xxo": 3, "---x-o-ox": 0, "o-ox-xo-x": 4, "xxo-o--o-": 6, "ox--xoxoo": 2, "---oxo-ox": 2, "oxo--oxx-": 4, "oxo-xoxo-": 8, "o--x-o--x": 7, "oxxxxooo-": 8, "----x--o-": 8, "---oxxxoo": 2, 
"xxo---oxo": 4, "xoo-oxx--": 7, "x--xo-oxo": 2, "o--x-o-x-": 4, "x--xo--o-": 6, "x--o-xxoo": 2, "-xo-x--o-": 5, "oo-x-xo-x": 2, "x-o-oxx-o": 3, "x-ooxo---": 8, "--ooxxoox": 0, "oo-xoo-xx": 6, "o--x-----": 2, "o--o--x--": 7, "--o-x-x-o": 5, "x--ooxx-o": 7, "o--oo-x-x": 5, "---ox-o--": 0, "o----ox-x": 4, "xxo-o--ox": 6, "-xooxxxoo": 0, "---x-o-o-": 2, "xo-oxx-oo": 6, "--xxxooo-": 8, "oox-oxox-": 8, "-x-xo-xoo": 0, "ox--oxoox": 2, "o--x-o---": 2, "-o---x-ox": 4, "-xooxo-ox": 0, "oo-o-xx--": 2, "---xxo--o": 2, "-xoo----x": 7, "xo---o---": 6, "----x-oox": 5, "o---x-ox-": 3, "x-o-ooxox": 3, "-ooxxoxo-": 0, "--o-oo-xx": 6, "oo-xx-oxo": 5, "--o-o---x": 6, "--o-----x": 6, "o---x---o": 7, "-ooox--xx": 0, "oo-xxoxo-": 2, "-x-xooo-x": 2, "xx---o-o-": 4, "ox--o---x": 5, "x-x-o-oox": 5, "x-xooxoxo": 1, "xo----o--": 4, "-xxo-oox-": 4, "-ox--ooxx": 3, "o--oox--x": 2, "oxx-xo-o-": 6, "---xxoxoo": 0, "oxxoox-o-": 8, "--o---x--": 0, "----x-oo-": 8, "x-oxx-o-o": 5, "o-x-o-o-x": 5, "oo-xx-o--": 5, "-o-xoxox-": 2, "---oo-xx-": 8, "x---xo-o-": 6, "-o-xxox-o": 0, "xo----o-x": 4, "xxooxo---": 7, "--ox--x-o": 0, "xx---ooxo": 4, "o-x-o-oxx": 5, "----oxx-o": 0, "----x--oo": 6, "-x-oo-xo-": 5, "xo-xxo--o": 6, "xoox-----": 6, "oxx-xo-oo": 6, "-ooox--x-": 0, "--oo---xx": 0, "o-xoo-xx-": 8, "xx-xoo--o": 6, "--x-x--oo": 6, "xoo--o-x-": 8, "---oo-x--": 5, "oxo-oxx--": 8, "-o--x-xo-": 3, "oxox--oox": 4, "o-x------": 5, "o---xo-xo": 1, "--xx-oxoo": 4, "x---ooxxo": 3, "---xo-ox-": 2, "ooxxxo-xo": 6, "--xoxooxo": 1, "oxo-x-xoo": 5, "-oxo--xxo": 4, "xoo-oxxxo": 3, "--xo---xo": 1, "-oox-oxx-": 8, "---x-ooox": 4, "-x--o--o-": 8, "xo-x-o---": 8, "xo-o-x-o-": 4, "xoo--o-xx": 6, "xox-xo--o": 6, "xxoxo--o-": 6, "o-xo--x--": 5, "-o--x-xoo": 0, "-xo-ox---": 6, "-ox-o-xxo": 0, "-x--oo-ox": 3, "xxoo--xo-": 5, "o---xo-x-": 1, "-oo-xox-x": 0, "oxox-o-x-": 8, "-x--o--ox": 2, "--xoxoox-": 1, "x-o-xo---": 8, "oxo-x-xo-": 5, "o--xooxox": 1, "x--x-o--o": 6, "-x--x--oo": 6, "-x-oox-ox": 2, "-oxoxoo-x": 0, "o-xoo-x-x": 5, 
"-xoo-x-xo": 4, "o-ooxx-xo": 1, "--oxoo--x": 6, "oo-x-ox-x": 7, "-x-o---ox": 0, "--xox---o": 6, "xo--oo-xx": 6, "o-xoox-x-": 8, "-o--o--xx": 6, "--oo-xxo-": 1, "-xxo-o-ox": 0, "o-xoox---": 8, "---o-xxoo": 4, "----o---x": 2, "oox----xo": 4, "oo-xx-xo-": 2, "-o-ox-x-o": 2, "xoo-xxoxo": 3, "xx-o-xo-o": 2, "xo-x-o-xo": 6, "-o-x-----": 4, "-x-o---o-": 8, "-ox-xxoo-": 3, "-o--o--x-": 8, "xo--oo-x-": 3, "o-xx-x-oo": 4, "o-xo--xxo": 4, "ooxoo--xx": 5, "xoo-xo---": 8, "o-oxx-oox": 5, "x--o-----": 2, "oo--x-xxo": 2, "---o-xxo-": 2, "-o-ox-x--": 0, "oox----x-": 5, "x-xo--xoo": 5, "-oo-x--x-": 0, "oxxo---xo": 4, "-ooooxxx-": 8, "--o-xx--o": 1, "oo-xo-xx-": 8, "xoo---oxx": 4, "xox--o--o": 6, "x-o-x--o-": 6, "o-xxo----": 8, "xo-oxoxxo": 2, "-o--xo--x": 7, "-x-oxoo-x": 0, "-xxoxooox": 0, "-xxoo-oox": 0, "xoxox-o--": 8, "-xx-o--o-": 5, "x-x--oxoo": 4, "o-xxoox--": 8, "xo--oox-x": 7, "-xoox-o-x": 0, "--o--ooxx": 4, "-o--oo-xx": 6, "oox-xx-o-": 3, "--x-o-xo-": 1, "o-x-oo-xx": 6, "oo-x-oxx-": 8, "--o-xxxoo": 3, "-x-oxoo--": 0, "-o--xo---": 8, "o--xo-x--": 8, "x-oo--xox": 4, "x----oxo-": 3, "-o-xoxo-x": 2, "xoo---ox-": 4, "--ooxox-x": 7, "-o-x-o-x-": 6, "xoxox-o-o": 7, "--xo-xo--": 0, "ooxx-o-ox": 4, "-xx-o--oo": 0, "oox-o---x": 7, "-oxxx--oo": 6, "xoxox-oxo": 5, "-o-xx-xoo": 0, "-xx-o-oo-": 0, "---xx--oo": 5, "o-----x--": 2, "ox-o---ox": 6, "o--x---ox": 5, "-oox-xo-x": 4, "-xoxoo-ox": 6, "-xoox-o--": 7, "-oox-xox-": 4, "oxo---xxo": 4, "-x--ox--o": 0, "-----xxoo": 3, "xoxo---ox": 5, "o--xoo--x": 7, "-x-oo-xox": 5, "o---ox--x": 7, "xoo-oox-x": 7, "-x-o-x-o-": 2, "-o--x--xo": 3, "x-oo-xo-x": 4, "-o-xo-x--": 7, "--ooxo-xx": 0, "x---o---o": 2, "ooxo-x---": 8, "x-x-o-oo-": 1, "o--xxoxo-": 2, "oxxo--x-o": 4, "--xxoo---": 0, "-xxoox--o": 0, "xx-o--xoo": 2, "-ox-oxox-": 8, "xoo---xxo": 3, "xoxoxoox-": 8, "x-xo-x-oo": 6, "x-x-oooox": 1, "xoxxo-o--": 7, "x-oo-xo--": 4, "ooxo-ox-x": 7, "----o-o-x": 2, "xx-ooxxoo": 2, "oox-xo-ox": 6, "x---ox--o": 6, "--xx--o-o": 7, "o-ooxxxo-": 1, "xoxo---o-": 4, 
"-xx-oox-o": 0, "oxxxooo--": 8, "---o-xoox": 0, "-o---xox-": 3, "oxxox--o-": 6, "x---o----": 7, "xoxx-ooxo": 4, "-oxx--oox": 5, "oo--ox-xx": 6, "-oxxx-oo-": 8, "ooxxoox-x": 7, "-oooxx-ox": 0, "--xxoo--o": 0, "-ox-oxoxo": 0, "xo-xx-o-o": 5, "oxoxo--ox": 6, "xo-ox-ox-": 8, "--xoox-xo": 0, "x--o-xo--": 2, "---o--xo-": 4, "--oo-xxxo": 1, "xx-oo-o-x": 2, "ooxo----x": 5, "xx-o-xoo-": 2, "-oo-x-ox-": 0, "-xooo-x-x": 7, "o-o-x---x": 1, "xoooxxx-o": 7, "ox-oxox--": 7, "xo--x-oxo": 3, "oo--xxxo-": 2, "---xoox--": 7, "--oxo-xox": 0, "--x-xo--o": 0, "xoo-ooxx-": 8, "xo--x-o--": 5, "--oooxx-x": 7, "--o--xxo-": 3, "---o--xox": 4, "xo-ox-oxo": 2, "x-xooxoo-": 8, "xx-oo-o--": 2, "x------o-": 2, "-ooxxoxox": 0, "-xx-xooo-": 0, "-oxo--xox": 5, "--oooxx--": 0, "ox--x-oox": 3, "ox-oxox-o": 7, "-oo-x-oxx": 0, "---xoox-o": 0, "oo--xxxoo": 3, "xo--x-o-o": 7, "x--xxo-oo": 6, "ox-xoxoox": 2, "--xo--oox": 5, "-ox---ox-": 8, "xo-oo--xx": 5, "-xxo-xoo-": 8, "---ooxxox": 2, "o----xx-o": 4, "ox-o--xxo": 4, "o---x-o--": 3, "--xoo--x-": 5, "oxo--ox-x": 7, "--ox-xxoo": 4, "-ox--xo--": 3, "ox-ooxxo-": 8, "ox-xoooxx": 2, "o----o--x": 6, "x-o--x-o-": 4, "xox---oox": 4, "--o-x---o": 5, "xoooxoxx-": 8, "ox-x--oox": 5, "o---x-o-x": 3, "-ox--oox-": 4, "---ooxxo-": 1, "-ox---oox": 5, "-ox---oxo": 4, "---xoo-x-": 6, "xo-x--oxo": 2, "xo-oo--x-": 5, "-oo---x-x": 0, "x-o---o-x": 4, "xx--xooo-": 2, "xx-oo--o-": 2, "-x-xxoo-o": 7, "-oo-xooxx": 0, "o---oxxox": 2, "-oxxo-x-o": 0, "xxoo-----": 4, "x-----o--": 8, "ox-ooxxox": 2, "--xxooox-": 1, "oxxxoo-o-": 8, "oxo--xxoo": 4, "-oxo-ox-x": 4, "ox-oxo-ox": 6, "ox---oxxo": 4, "xoox-xo-o": 4, "--xxo-oxo": 0, "---o-ox-x": 7, "oxoox---x": 7, "-oooxxx-o": 0, "-xo-x-oo-": 8, "o-oxx-oxo": 5, "-xo-oxx-o": 0, "o-xx-o-o-": 4, "o--xx-o--": 7, "x-xoox-oo": 1, "x-x---oo-": 8, "-xooox-ox": 6, "-xx-ooo-x": 0, "-xxoo--ox": 5, "oxoox----": 7, "---o-ox--": 4, "-----o---": 2, "oxo--xxo-": 4, "-xo-x-oox": 5, "xoo-xxoo-": 3, "oxxxo-oox": 5, "--xoo-xox": 5, "----oxox-": 2, "xxo-oxxoo": 3, 
"xoox-xo--": 4, "oxxxoo-ox": 6, "-oooxxx--": 0, "ox---o--x": 7, "-oooxoxx-": 8, "xxo-xoo--": 8, "oxxoo--ox": 5, "--xo-x-o-": 6, "o-oxx-ox-": 5, "o--xx-o-o": 7, "x-xoox-o-": 8, "-xx--ooox": 0, "ooxo--xox": 5, "oo-oxo-xx": 6, "xx---oxoo": 3, "o-xo-oxox": 4, "-o-x-oo-x": 4, "xxo--oo-x": 4, "x-ooxox--": 8, "-oxoox---": 8, "o-xxxooox": 1, "xx-xo-oo-": 2, "--oox-xo-": 5, "-o---o-xx": 4, "x-xoo-o-x": 5, "-xxo-oxoo": 0, "o-o-xx-o-": 3, "x--oo-xxo": 5, "-o-xxooox": 0, "x-o-o----": 6, "xxooo-xo-": 5, "oxo-x----": 3, "xoxo-o--x": 4, "--x-o-o--": 0, "--o--x-xo": 4, "x---oo---": 3, "x-xoxo-o-": 1, "xo---ox--": 4, "-o--x-o--": 2, "-x-----oo": 6, "-xxo--xoo": 0, "oo-oxxx-o": 2, "-oo-xo-xx": 6, "xox-oox--": 3, "-xoxo-x-o": 0, "-o--o-x--": 7, "-xxx-oo-o": 0, "x-o-o---x": 6, "xxooo-xox": 5, "xxoo-x-o-": 8, "--ox-ooxx": 4, "-o---xoox": 4, "xo-o-xoxo": 2, "oxxxo--o-": 8, "x--o--x-o": 2, "-xo-x-xoo": 5, "x-xoo-o--": 1, "-o---o-x-": 2, "o-xxxooo-": 8, "oxo-x---o": 5, "o-o-xx-ox": 3, "-x--x-oo-": 8, "x-xoxo-oo": 6, "xoxooxx-o": 7, "-o--o-x-x": 7, "x-x-o-o--": 8, "xo---ox-o": 2, "-x-----o-": 8, "xox-o-o--": 7, "--xxo---o": 0, "--x-o-o-x": 1, "o-xx---o-": 4, "-xo---oox": 4, "----xo-xo": 2, "o----oxx-": 8, "--ooox-xx": 6, "o---xooxx": 1, "-oxxooxxo": 0, "-oxoxo-x-": 0, "-ox-xo-ox": 3, "o--oxo-x-": 6, "---oo---x": 5, "o---xxoo-": 3, "oxxx-oo--": 4, "oxooxx-ox": 6, "xo-o-x-ox": 2, "oo--o--xx": 2, "ox--xo--o": 7, "xooox--x-": 8, "o--------": 4, "o-o-x----": 1, "-xx-oxo-o": 0, "ox----xoo": 4, "-xo-xo-ox": 0, "---oxxoxo": 1, "xox--oxo-": 4, "-oxo----x": 5, "oox--oxx-": 4, "x--oxoo--": 7, "-o-oxoxxo": 2, "---oxox-o": 2, "x---ooxo-": 3, "-oxox-oox": 0, "x--ox-xoo": 2, "o-oo--xx-": 8, "oxooxx-o-": 6, "o---xxoox": 3, "o-x-o-xox": 5, "o-------x": 6, "xooxo---x": 6, "ox-xooo-x": 2, "o---xoox-": 1, "o--oxo-xx": 6, "oo--o-x-x": 7, "oox--xox-": 3, "-ox-xo-o-": 8, "xooox--xo": 5, "xoo-xx-o-": 8, "oo--xx---": 2, "o-xxoooxx": 1, "-o-oxoxx-": 0, "oox--oxxo": 4, "o-xxx--oo": 6, "x---ooxox": 3, "---oxox--": 2, 
"x-xoxooo-": 1, "ox-xx-oo-": 5, "-oxo-----": 8, "---x--o--": 7, "ox--ooxox": 3, "x-oo-x--o": 4, "---o-oxx-": 8, "x--ox-oxo": 1, "oo-xxooxx": 2, "ooxx-----": 7, "--xo-----": 0, "oxoo-xxo-": 8, "-oxo-xx-o": 4, "x---oxo--": 2, "---o-x--o": 6, "o-x-xo--o": 1, "-oo-oxxx-": 8, "x-x-oo-xo": 3, "xoo-o-x--": 7, "xoo-o-xxo": 3, "--x----o-": 4, "-xx-o-oox": 3, "x--xooox-": 2, "o--xx--o-": 2, "-oooxox-x": 0, "-x-oxoxoo": 2, "x-x-o--o-": 1, "-o-o---x-": 0, "x-xo--o--": 7, "x-oxo----": 6, "x-oo-x---": 8, "o-x--oxox": 4, "o-x-xx-oo": 6, "----xooox": 0, "xoo--x-xo": 3, "x-xxo-oo-": 1, "x--o-xoxo": 1, "oo--xxoxo": 3, "oxo-xoxox": 3, "x-x-oo---": 3, "xoo-x-o--": 3, "---xxooxo": 1, "---o-x---": 4, "o-x-xo---": 1, "oxoo-xxox": 4, "xoox-xoxo": 4, "--x--ox-o": 3, "xoo-o-xx-": 8, "-o-o---xx": 5, "xo-ox--o-": 2, "xoo-x--xo": 5, "x-oox-xo-": 8, "o--xx--oo": 6, "oo-xx-o-x": 5, "-x-oxoxo-": 8, "x-x-o--oo": 1, "xoo-o-x-x": 7, "x-xo--o-o": 1, "oo-o--xx-": 2, "-ox-xx-oo": 3, "ox--ox---": 8, "--oox-xox": 0, "-o-ooxx-x": 7, "xo-x-x-oo": 6, "----oo--x": 3, "x-x-oo-ox": 1, "xo-o-o-x-": 4, "-xo-oox-x": 3, "-xxoo--o-": 0, "--x---oox": 3, "oxoxo-x--": 8, "o-oox-x--": 1, "-xooxx-o-": 8, "xo--xxoo-": 3, "--oxooxx-": 8, "-xxo-x-oo": 6, "o--ox-x-o": 7, "-ooxo-x-x": 0, "--x-x-oo-": 8, "-oxoo--xx": 5, "o--oxoxox": 2, "xx-oo----": 2, "x-oo-x-xo": 6, "x-x-oo-o-": 1, "-o---o--x": 6, "o-o-xxox-": 1, "-xx-ox-oo": 0, "xooxxo---": 6, "ox--xxoo-": 3, "----o-xox": 1, "-o-xox-xo": 0, "xo-xx-oo-": 5, "-o-ooxx--": 7, "xo-o---xo": 2, "-xooxx-oo": 6, "-----o-ox": 1, "-x---ooox": 0, "o--ox-x--": 2, "---x--xoo": 5, "-oxo-o--x": 4, "o-oxxo-x-": 1, "o-oox-x-x": 7, "--ooxxxoo": 0, "oox---x--": 8, "-x---o-o-": 6, "ooxo-xxo-": 4, "-xooxxoox": 0, "-o-ooxxx-": 8, "o-x--oxxo": 4, "--xx-oo-o": 7, "o-x---o-x": 5, "ox-o-oxx-": 4, "-o--xxoox": 3, "ox--x--oo": 6, "xo--xx-oo": 3, "xooxoo--x": 6, "--x-oo--x": 3, "xxo-x-o-o": 7, "xoxxoo---": 6, "--x-oo-x-": 3, "x-x-oox-o": 1, "ooxoxo--x": 6, "ooxo---xx": 6, "xx-oo--xo": 2, "xoxoxoo--": 8, 
"---oxx--o": 1, "-o--xxoo-": 3, "o-xo---ox": 5, "x-------o": 6, "--ox-o-ox": 0, "-o-ooxxxo": 0, "---ox-xoo": 2, "--xx-oo--": 8, "ox-xo-xo-": 8, "ooxxxoox-": 8, "-x---o-ox": 0, "ox--x--o-": 8, "oox--xo--": 3, "x-oxoo---": 6, "--o-o--xx": 6, "-x-x-o-o-": 0, "----oo-xx": 6, "x-oxoo--x": 6, "-o--xoxxo": 2, "--x-oo---": 3, "xoxxoo--o": 6, "x--o--xoo": 2, "xoooxx-o-": 8, "xx-o--o-o": 2, "x---o--xo": 3, "xxo-ox--o": 6, "-ox-x---o": 5, "xo--o-xxo": 3, "xoxoxxo-o": 7, "o-xx-ox-o": 4, "oxox--o-x": 4, "x-ooo-xx-": 8, "x-ox--oox": 4, "-o-oox--x": 7, "-o-xxoox-": 2, "--oxoox-x": 7, "o---xo---": 8, "-----xo--": 8, "----x-oxo": 5, "oo-xx-xoo": 5, "-o-x--x-o": 0, "-ox-oox-x": 7, "-xo--xo--": 4, "xx-o--o--": 4, "x-o-xxo-o": 7, "ooxxoo--x": 7, "--o-xxoxo": 3, "-x--oox--": 3, "--xx-ooxo": 1, "oox---ox-": 3, "-oxox-oxo": 0, "x-o--ox--": 8, "-xoo-xox-": 4, "x---oox--": 3, "oxo-xx-o-": 3, "-x-x---oo": 6, "ooxxoo-xx": 6, "-ox------": 8, "--oxoox--": 0, "xo-xxoo--": 8, "oxxx-ooxo": 4, "ox--ooxx-": 8, "----o--x-": 0, "xo--oxoxo": 2, "oxo-o--xx": 6, "ox--o-xo-": 8, "-xoooxxo-": 0, "-oxxxooo-": 8, "---oxoox-": 1, "ox-o--xox": 2, "-oo-x-xo-": 0, "o-xoxo-xo": 6, "o--oxox-x": 7, "--oo-x---": 0, "o-xo-xx-o": 4, "xox--ooxo": 3, "xoo-xoox-": 8, "-ooxoo-xx": 6, "-xoxx-o-o": 7, "-ooxx-xoo": 0, "xoxxoo-xo": 6, "o-ox-x-ox": 4, "x---oo-x-": 3, "oox-xx--o": 6, "--oo-x--x": 0, "ox-o--xo-": 8, "xoooxox--": 8, "oox---o-x": 5, "-oo-x-xox": 0, "xo--x-xoo": 3, "--xoxooox": 0, "o-xoxo-x-": 1, "-xoooxxox": 0, "ox--o-xox": 2, "-o-xx---o": 5, "--xxoox-o": 0, "x--x-oo--": 2, "ox-xox-o-": 8, "o-xoo-xox": 5, "xooo-xx--": 8, "-oo--o-xx": 0, "o-ox-ox-x": 7, "oo-ooxxx-": 8, "-o--xox--": 2, "xxo-ooxox": 3, "ox-oxo---": 6, "-o---oxox": 4, "xo-oo-x-x": 7, "oox-xx-oo": 3, "-oxoxx-oo": 6, "x-ox-ooox": 4, "-oo-xxo-x": 3, "oxoxx-o--": 8, "x--ooxo-x": 2, "---xoo---": 8, "--o-xoxox": 0, "xo-ooxxxo": 2, "ox-o-ox-x": 7, "o-o-xxoxo": 3, "o-x--o---": 3, "-xooxxo-o": 7, "-x-o-o-ox": 4, "x-o-o-x-o": 3, "o--xx-oxo": 2, "o-o-xooxx": 1, 
"o---xo--x": 6, "ooxxx-oo-": 5, "x-o----xo": 5, "oxo-xxoo-": 3, "xx-oo-xoo": 2, "xx-o-x-oo": 2, "x--o----o": 2, "x--ooxo--": 2, "---xoo--x": 6, "oox-xo--x": 6, "o--o-ox-x": 4, "o--x-xoox": 4, "oxo--o-xx": 3, "-oxoxx-o-": 0, "-x-ooxx-o": 0, "oxoxx-o-o": 5, "xoo--oxx-": 3, "ooxxx-oox": 5, "-xox--xoo": 5, "x---o-ox-": 2, "o-x---xo-": 5, "ox-o-o-xx": 4, "o--xooxx-": 8, "oxo-xxoox": 3, "--o-xoxo-": 8, "xx-oo-xo-": 2, "oxx-oxo--": 8, "o-x--o--x": 6, "o---xoxxo": 1, "oxx-o-oox": 5, "oxoox-x-o": 7, "-o-xoo--x": 7, "-xx-ooox-": 0, "x-oo----x": 7, "--ox-oo-x": 4, "xx---oo--": 2, "-xxxo-o-o": 0, "o-o---x-x": 7, "-xooxox--": 7, "oox-ooxx-": 8, "-x-o-xoxo": 4, "xo----ox-": 4, "---xo-o-x": 2, "o--xxo--o": 2, "-o-ox--ox": 0, "-o-oxxox-": 0, "o-x-xoox-": 1, "x-ox----o": 6, "x-o-x-xoo": 3, "xx-o-oo--": 4, "------o-x": 0, "xo---o-xo": 2, "xooo--xox": 4, "x---ox-o-": 1, "x----xo-o": 7, "xooox-x-o": 5, "xo-xoo---": 6, "o--o--xx-": 4, "oxo-xoo-x": 7, "oxoooxx--": 8, "oox---oxx": 5, "-x-xoo---": 0, "xoxo-ox-o": 4, "ooxx-x--o": 4, "--xo--x-o": 1, "-o-o--xx-": 4, "-o--xxoxo": 3, "ooxxx--o-": 6, "o-x-ox---": 8, "-xx-oooxo": 0, "o---xx-o-": 3, "xxox-ooox": 4, "----oxo-x": 2, "-xoxx--oo": 5, "xx--ooox-": 2, "xo----oxo": 2, "-o-o-xxxo": 0, "--oxxo-ox": 0, "xxo--oxo-": 8, "o--xxo---": 2, "ooxox--ox": 6, "x-oo-----": 4, "o-xx-xoo-": 4, "x-ox-xo-o": 4, "oxoooxx-x": 7, "o-xxxo-o-": 6, "o--ox--ox": 6, "oxooxx---": 7, "ox-xxoxoo": 2, "xo-xoo--x": 6, "x-xooxo--": 8, "-ooxx-x-o": 0, "xo---o-x-": 6, "o-o-x-ox-": 1, "o--o--xxo": 4, "-o-oxxo-x": 2, "------o--": 4, "ooxxxoo--": 8, "ooxoxx-o-": 6, "xo--xo--o": 2, "oxx--oox-": 4, "---ox-xo-": 8, "--oo--xox": 1, "xxoox-oo-": 8, "x---xoo--": 1, "xx----o-o": 7, "-ox-o-x--": 7, "x-oox----": 8, "xooo--oxx": 4, "xox--oo--": 4, "-ox--oxox": 4, "xx-oox--o": 2, "o---x----": 7, "----ox-xo": 0, "oxo---xox": 4, "-ooxx-xo-": 5, "x-ooxx--o": 1, "xx-x-o-oo": 6, "-oxox-ox-": 0, "xoxoo--x-": 5, "-xo----o-": 6, "-oox-ox-x": 7, "x--o--ox-": 1, "ooxx-ox-o": 4, "o-x---x-o": 4, 
"-xooo-xx-": 8, "ox-oxx-o-": 6, "oxooxxx-o": 7, "oxoxoox-x": 7, "xox--oo-x": 4, "o-x-xxoo-": 3, "x-oox---o": 5, "ox-xx--oo": 5, "-x-oxo--o": 2, "o--xxo-xo": 1, "-xo--o-ox": 3, "xo--o-ox-": 2, "-xoox-x-o": 7, "-xo----ox": 3, "oxo---xo-": 4, "-x-xooxoo": 0, "-xo-xooox": 0, "xx--ooxo-": 3, "x--xo-o--": 2, "xo----x-o": 4, "----xox-o": 2, "-o-x-oxox": 4, "xoox-o---": 8, "x---x-oo-": 8, "x-oo-xox-": 4, "oxo-o-xox": 3, "xox-xooo-": 8, "xo-oxoox-": 8, "-o-xxo-o-": 0, "oo-oxx--x": 2, "---x-oxo-": 2, "-x----xoo": 2, "oxxx-ooox": 4, "-ox-oooxx": 3, "--xoo----": 5, "oxoxo-xo-": 8, "o-x--xxoo": 4, "-oxox--ox": 5, "-oo--ox-x": 7, "--xooxo--": 0, "x-xxoo-o-": 6, "---oo--xx": 5, "-xoo-o-xx": 4, "oox-xooxx": 3, "-x--xoxoo": 2, "---o-----": 5, "-o-xxo-ox": 0, "xooox----": 8, "ooxxox---": 8, "--x-xooo-": 8, "xo---xxoo": 4, "-xxxoo--o": 0, "xoox-o--x": 4, "-x--o-o-x": 2, "-oxx--o--": 5, "o--x--o-x": 5, "oox-xoox-": 3, "xxooxxoo-": 8, "o-x----xo": 4, "---o----x": 6, "--xoo---x": 5, "x-xxoo-oo": 6, "--xo-o-ox": 4, "--oxx---o": 5, "x-xoxoo-o": 1, "-ooxxooxx": 0, "oxo-ox-ox": 6, "oxoxo-xox": 5, "x-xxo--oo": 6, "-oxox--o-": 8, "----xo--o": 2, "xoo-ox-x-": 6, "-ooxx--ox": 0, "o--o-x---": 6, "x-xox-oo-": 1, "o--ooxx-x": 2, "-oxxo--xo": 0, "ox-x-ooox": 2, "--xox-oox": 5, "o-o---xx-": 1, "oxxoox---": 8, "-x-o-xo--": 0, "o---x-x-o": 3, "-oxxo-oxo": 0, "xo--ox---": 7, "xo--ox-xo": 3, "o-x-xo-xo": 6, "---oox--x": 6, "x-x-ooxoo": 3, "xoxo--xoo": 4, "oxo-o-x-x": 7, "o-x-xooxo": 1, "xox----o-": 4, "ox-xooxo-": 8, "x-xxoooox": 1, "x-o--o--x": 4, "o-xxoo-x-": 8, "---oox-x-": 8, "xo--o----": 7, "oox---xox": 5, "x-o-oox--": 3, "o--ooxx--": 8, "xoo-ox-xo": 6, "xoxo-oxxo": 4, "----xo---": 2, "-oxx-ox-o": 4, "xo-x----o": 4, "xxooox---": 6, "-x-ox-xoo": 2, "-oo--xx--": 0, "-ooxx--o-": 5, "o--o-x--x": 2, "xxoooxxo-": 8, "-o--o---x": 7, "oxoo--x-x": 7, "-oxxo-ox-": 8, "-oo-o--xx": 6, "-o--xxxoo": 2, "x-xxoooo-": 1, "xo--o---x": 7, "x-xo-ooox": 4, "xoxo--xo-": 4, "---oox---": 6, "----o----": 6, "o--xoxo-x": 2, 
"-ooo-xxx-": 8, "oox-o-oxx": 5, "xo-----o-": 4, "-xxoxo--o": 0, "--o-x-xoo": 5, "x--xoo-xo": 2, "x-xoo--ox": 5, "--ooxx--o": 1, "xx-oxoo-o": 7, "x-xox--oo": 6, "x---o-xo-": 1, "oxox-o--x": 7, "--xoxo--o": 7, "o-ooxo-xx": 1, "--ooxooxx": 1, "xo-xo-oxo": 2, "---ox-o-x": 0, "--o--x---": 4, "oox-x-o--": 3, "x--xo---o": 2, "xooo---x-": 8, "o-x---oox": 5, "xo-oxx-o-": 6, "---ox-ox-": 1, "--o-oxxxo": 0, "-oxx-o-o-": 4, "x----oxoo": 3, "-xoxo----": 6, "o--x-oo-x": 1, "xxoxo---o": 6, "--o-x-xo-": 0, "o-o-xoxox": 1, "xxoo-xoxo": 4, "oo-xoox-x": 7, "-o-ooxoxx": 2, "xo-----xo": 3, "o----o-xx": 4, "--ooxx---": 6, "x-xoo--o-": 1, "x---o-xoo": 1, "--xoxo---": 0, "xooo---xx": 6, "o-xxooox-": 8, "-o--ooxx-": 8, "o--o-xxox": 2, "o----xo-x": 2, "--oxoxx-o": 0, "o-o--ox-x": 7, "---ox-oxo": 1, "o-x-oxxo-": 8, "oox-x-o-x": 5, "-oo-xx--o": 3, "x--ox--oo": 6, "x-xo--oxo": 1, "ox-oo-xox": 5, "ox-ox-xo-": 2, "x--o---xo": 1, "ooxx---xo": 4, "---xoxoox": 2, "-xo-xx-oo": 3, "xo-xx--oo": 6, "--xxxo-oo": 6, "-ox-oo-x-": 3, "o------x-": 6, "xooxo--x-": 6, "--oxx-oxo": 1, "xooo-xxxo": 4, "ox-xoo-ox": 6, "x-oxxo-o-": 8, "-ooxxo-ox": 0, "x-oooxxox": 1, "-ox-ox---": 8, "xxoo--ox-": 4, "x--ox--o-": 2, "-oo-xx---": 3, "oo-xx-ox-": 5, "---x----o": 6, "ooxx--x-o": 4, "ox-ox-xoo": 2, "xox-oxoxo": 3, "o-xoxx--o": 6, "-xox-oox-": 4, "o--xxoo-x": 2, "--x--ooox": 0, "o--o--xox": 2, "-----oo-x": 3, "ox-x-o-ox": 2, "ox-o-xxoo": 4, "xooxo--xo": 6, "-xo--ox--": 8, "xo--oxx-o": 3, "x-o-x---o": 5, "oxo-xx-oo": 3, "oo--xx-xo": 3, "-ox-oo-xx": 6, "-oxxooo-x": 7, "---oxoxxo": 2, "-oo-xo--x": 0, "--xxoooxo": 0, "o-x-o---x": 7, "oo-oox-xx": 2, "--x--o-ox": 4, "-ox--oxxo": 4, "x---oo--x": 3, "oox--xxo-": 4, "---ooxxxo": 0, "o--xoox--": 8, "oxx---xoo": 4, "xoo--ox--": 3, "-o-o-xx-o": 4, "-xxox-oo-": 0, "x-x-o-oxo": 1, "oox-x--xo": 3, "oox--xx-o": 4, "-o--o-oxx": 2, "x--xxooo-": 8, "-x--oo--x": 3, "-oo-xx-xo": 3, "-o----xo-": 4, "xox-o-x-o": 3, "xoo--ox-x": 3, "o--xoox-x": 7, "xx-o--oxo": 4, "--xxxoo-o": 7, "oxxxooox-": 8, 
"--x--o-o-": 0, "xoo-x--o-": 5, "o-x-o----": 8, "--o----ox": 1, "xoox---ox": 6, "x-o---ox-": 4, "-x-oox---": 2, "xx--o--oo": 6, "ox-xo-o-x": 2, "--oxxoox-": 1, "xo--oxo-x": 2, "x-x-o-o-o": 1, "o-xx-ooox": 4, "o-o-xo-xx": 1, "o---xox--": 8, "--ooxxo-x": 0, "--x-ox--o": 0, "----oox--": 3, "ox---x-o-": 4, "o----oxox": 4, "x-oox-o--": 7, "--xoo-oxx": 5, "xox--o---": 6, "o--xo---x": 2, "oo-xx---o": 5, "--oxxoxo-": 0, "x-oo-o-xx": 6, "--o-xoo-x": 1, "-ox-ox-xo": 0, "oo-xx-x-o": 2, "-xooxo--x": 0, "xoooxo-x-": 8, "-oxxoo-xo": 0, "x--ox-oo-": 8, "--oo-o-xx": 4, "---x--oxo": 4, "x-oo-oxox": 4, "x-ox-o-ox": 4, "o-xxoo-ox": 1, "xoox--ox-": 4, "--ooxxo--": 0, "o-o-xo-x-": 1, "ooxxxoo-x": 7, "x-oooxxxo": 1, "-xxox--oo": 6, "oo-xx----": 5, "-xooxo---": 7, "x-x-xoo-o": 1, "oxoxx-oo-": 5, "o-xxxooxo": 1, "ox-x-oox-": 4, "-oxxoo-x-": 6, "--oxo--x-": 6, "oxxxxo-oo": 6, "o-xxox-o-": 8, "oox---xxo": 4, "xxoxoo-o-": 6, "xo-oxxoo-": 8, "-o-oo-x-x": 7, "oxoxxoo-x": 7, "o-x--xo--": 3, "---xo-xo-": 0, "o--oxxxoo": 2, "xo-oo-xxo": 5, "xx--o--o-": 6, "--o--ox-x": 0, "oxx-xo--o": 3, "--xxo-o--": 0, "-xxooxo--": 0, "--xxo--o-": 1, "oo--xxoox": 3, "xo-xoo-xo": 6, "-xxxoooxo": 0, "oxoxxoo--": 7, "-xo-x---o": 5, "x-ooxoox-": 8, "oxoxx-xoo": 5, "xxoox--o-": 8, "o-xox--ox": 6, "---xo-xoo": 0, "xxoxoo-ox": 6, "o-o-x-xo-": 1, "x-oo-xxoo": 1, "xo-oo-xx-": 8, "xxoo-xoox": 4, "xo-o-xoox": 2, "o-o-x--x-": 1, "ox--x-o--": 7, "oxoo-oxx-": 4, "-xxooxoo-": 8, "---o-o-xx": 6, "xo-xoo-x-": 6, "o-oxxo-ox": 1, "xox-xo-oo": 6, "--ox--ox-": 4, "xo-oox-x-": 8, "---oxoo-x": 0, "oxoxoo--x": 6, "-xxo-o-xo": 4, "xoxxooox-": 8, "ox-------": 6, "-ox--oo-x": 4, "xoxoo-ox-": 5, "o---o-xx-": 8, "oxx-x--oo": 6, "-oo-xoxox": 0, "-x-o--xo-": 2, "-x-ox-o-o": 7, "ooxxoo-x-": 8, "x-xoo-x-o": 1, "o--xxox-o": 2, "--xoox---": 8, "--ox-o-x-": 8, "x-o-xoox-": 8, "xx-ooxoox": 2, "xoxox---o": 6, "----x-o--": 2, "xoooxx-xo": 6, "oxxo-oxox": 4, "-xox-o-ox": 0, "xxo-xo-o-": 8, "o-xxo--ox": 5, "xxo-ooxo-": 3, "oxxo-xxoo": 4, "-oooxx--x": 0, 
"xx-o---o-": 2, "oxoxox-ox": 6, "oo-ox--xx": 6, "-oo-oxx-x": 7, "x--o--o--": 1, "-x-o--xoo": 2, "xo---xoox": 2, "-o-xx--o-": 0, "-x-ox-o--": 0, "x-ooox-xo": 6, "--xoox--o": 0, "ox--xox-o": 7, "-oxoxo---": 6, "xo-o-x-xo": 6, "o--xo-xox": 1, "x--o--o-x": 5, "-oo-ox-xx": 6, "xoo---xo-": 3, "-oo-xxxo-": 3, "ox-xo--ox": 2, "xxoo-oxox": 4, "-x-oo--ox": 5, "xx-xoo-o-": 6, "-oox-o-xx": 6, "-x--ox-o-": 2, "o-oxx-x-o": 5, "xoxo-----": 4, "-xx-oxoo-": 0, "x--xooo--": 2, "--ox-ox--": 0, "xox--oox-": 4, "o---x--o-": 3, "-xxxoo-o-": 0, "-oxxxooox": 0, "-xoox----": 8, "o--x--xoo": 4, "---x-oo--": 2, "oxoxx--o-": 5, "ox-o-x-xo": 4, "oox-xxo-o": 3, "ooxo-xxxo": 4, "-xxoox-o-": 8, "ox-oxoxox": 2, "-o---oxx-": 8, "oo-xxo-ox": 2, "-o-o-xxo-": 4, "x--xooo-x": 2, "-ooxo--xx": 6, "-ox-o--xo": 0, "xooo-xxo-": 4, "o-oxx-o--": 5, "xo-x-o-ox": 4, "xoxo----o": 7, "ooxxo---x": 5, "xo-o-ox-x": 4, "-xxxoo-oo": 0, "---xoooxx": 2, "-xoox---o": 7, "--oxxo-o-": 8, "o--x--xo-": 4, "-oo-xxxoo": 3, "ox-oxoxo-": 2, "oxo-x-x-o": 5, "x-o-ooxx-": 8, "-xxoox-oo": 0, "---x-oo-x": 1, "-oxoxo--x": 7, "oox-----x": 4, "o-xx-oox-": 4, "-x--xoo-o": 7, "xo--xo-xo": 2, "-oo-x---x": 0, "o-oxoo-xx": 6, "oxoo--xox": 4, "-oxo-o-x-": 4, "-xx---oo-": 8, "-xoxooxo-": 0, "x-o--xoox": 4, "xx--xoo-o": 7, "o---x-oox": 3, "xoox-o-x-": 6, "--xox--o-": 8, "xo-o-xo-x": 7, "oo-o-x-xx": 6, "-xx-o---o": 0, "o-xx-xo-o": 4, "-o--ox--x": 7, "xo-oxoxo-": 8, "xox-xo-o-": 3, "x---o-o--": 2, "-----oxox": 4, "-oxx-ooox": 4, "xoo-xxo-o": 3, "-x--o-xo-": 0, "--x-xo-o-": 8, "ox-ox--ox": 6, "---xx-oo-": 5, "oxxoxo--o": 6, "-ox-xxo-o": 3, "-xoxooxox": 0, "-x--xoo--": 8, "xoooxxox-": 8, "o-ox-xx-o": 4, "o-xx--oxo": 4, "ooxx-o---": 8, "x-oo--x--": 8, "oox------": 8, "-ooxx-oox": 0, "-oxo-o-xx": 6, "-oo-x----": 0, "-o--xx-o-": 0, "-o-oo-xx-": 8, "----o--ox": 1, "ox-ooxx--": 8, "o-o-x--xo": 1, "o--xoooxx": 2, "---o-xx-o": 4, "xo-ooxx-o": 7, "x--o-oxo-": 4, "xo-o-xo--": 8, "-x--o-xoo": 0, "-xoooxxxo": 0, "xx-o-o-xo": 2, "oo--xx--o": 2, "oxxoxo---": 6, 
"--x-xo-oo": 6, "xxo-o-xo-": 3, "-xoxoox--": 0, "x--oxxo-o": 7, "x----oox-": 1, "-o-oxxxoo": 2, "o--xx-oo-": 5, "-xo-oxxoo": 0, "-oxxoxo--": 8, "x-oox--xo": 1, "-o-oxox-x": 7, "o--oxx--o": 6, "x-ooxxoxo": 1, "xx--ooxoo": 2, "xx--oo---": 3, "xo-xox--o": 6, "-oxxxooxo": 0, "-----o-x-": 4, "x-x-oxoo-": 8, "o-o-xxo--": 3, "-xxxoooo-": 0, "oox--ox-x": 3, "--oox---x": 1, "--xxoo-ox": 1, "-o-xx-o--": 5, "xxo-o-xoo": 3, "-o-o-xxox": 4, "-o-oxox--": 0, "o--xx-oox": 5, "-xxoo-o-x": 0, "oxxxo-o--": 8, "x----oo-x": 4, "x-xo-o--o": 4, "--xoo-x--": 5, "o-xxxoo-o": 7, "-xxxoooox": 0, "o-o-xxo-x": 3, "o--oxx---": 6, "-o--x--ox": 3, "-oxxoxoxo": 0, "xo-ox-xoo": 2, "--xxoo-o-": 1, "--oox----": 6, "-o--xo-ox": 0, "--xxooxoo": 0, "oo-oxox-x": 7, "x-xx-o-oo": 1, "o-o-xo--x": 1, "-x-x-ooxo": 4, "-------o-": 8, "ox---xoox": 3, "-o-xx-o-o": 5, "xxoo---ox": 4, "xo-o---x-": 8, "-oxx-xo-o": 4, "--xx-o-oo": 6, "o--x-ox--": 8, "--ox-xoox": 4, "-ox----ox": 4, "oxo-x--ox": 5, "-xoo-oxox": 4, "ox-xxooo-": 8, "oo--oxx-x": 7, "x--o-xoox": 4, "x--x-ooox": 2, "xooooxxx-": 8, "-oxxxo-oo": 6, "x-x-ox-oo": 1, "-o-o--x--": 8, "x--oox--o": 6, "ooxx-xoo-": 8, "x-x--o-oo": 1, "xooxx--oo": 5, "--o-x-o--": 3, "-xo---o-x": 4, "xx-xo-o-o": 2, "-oo-o-x-x": 7, "-o-oxx--o": 6, "xooxx-o--": 8, "-xox--oox": 4, "---xooxox": 0, "x--o-xoo-": 8, "--x-oxo--": 0, "xo--x--oo": 6, "ooxooxxx-": 8, "---o--x--": 7, "ooxxxo---": 6, "-xx-xo-oo": 6, "-----xoxo": 4, "ooxxx-oxo": 5, "ox-xxooox": 2, "o--o-oxx-": 4, "-x-ooxxo-": 2, "--o-xo--x": 7, "-ox----o-": 4, "xo--xoo--": 8, "oxxox---o": 7, "xxooxx-oo": 6, "--o-x-o-x": 3, "--xo-x--o": 6, "xx-oo--ox": 2, "---xooxo-": 0, "-oxxx-o-o": 5, "---x---o-": 6, "x-x--o-o-": 6, "xooxx--o-": 5, "x--oox---": 2, "----x-o-o": 7, "o-xxx-o-o": 5, "---xxoo--": 1, "oxx--oxoo": 4, "-o-oxx---": 8, "xooxx-o-o": 5, "-o-o--x-x": 0, "xoox----o": 5, "-o------x": 4, "xxoo-x--o": 6, "-xo-xoxo-": 8, "xox---xoo": 4, "-o-xxooxo": 2, "ooxoxx-xo": 6, "--oooxxxo": 0, "o--ox--x-": 6, "-xo---xoo": 5, "o-x--x-o-": 6, 
"ooxoo-xx-": 8, "---x-o-xo": 2, "-xx-oo---": 0, "--oxxo---": 8, "ox-oxo--x": 6, "x-x-ooo--": 1, "oo-oxxxo-": 2, "o-oox--xx": 6, "xo--o-x--": 7, "oo--xo-xx": 6, "x-o--x--o": 3, "ox--oox-x": 7, "x-xoo-oox": 1, "--x-o--xo": 0, "--x-oxoxo": 0, "--o-o-x-x": 3, "--o--xx-o": 3, "------x-o": 0, "oxo--oxox": 3, "o-x-x---o": 3, "oo-x-o--x": 2, "-ooxo-xx-": 0, "o--ox--xo": 6, "o--oo-xx-": 8, "-o-------": 2, "xoox-x--o": 7, "oox-o-x-x": 5, "-xx-oo--o": 0, "oo--xo-x-": 2, "o-oox--x-": 1, "-x--x-o-o": 7, "-xxo--oxo": 0, "o-xx----o": 4, "xxoo--oxo": 4, "ooxoo-x-x": 5, "-x-xoo-ox": 0, "ox-xooxox": 2, "xo--o-x-o": 3, "oxo--x-ox": 4, "oo-oxxxox": 2, "x-x--ooox": 4, "oxoo-xx-o": 4, "x-xo-xoo-": 8, "----xxoo-": 8, "ooxxxo--o": 6, "o-x-o--ox": 5, "ox-xoo--x": 7, "ox--xo-o-": 6, "x-ooo-x-x": 7, "o-oxx-o-x": 5, "oxxoo---x": 5, "x-xoox--o": 1, "--o-ox--x": 6, "xxoo---xo": 4, "-o----xox": 4, "x-oo-xx-o": 4, "o-o-xx-xo": 1, "ox-x--o--": 4, "-o-ox--x-": 8, "xxo-oox--": 3, "o-xoxo---": 6, "o--xxoxoo": 2, "xooo-xxox": 4, "x--o-x-o-": 2, "xooxx-oxo": 5, "oxo-xo-ox": 3, "xoo-xo-x-": 8, "---oxooxx": 0, "oxoo-xx--": 7, "ox--xo-ox": 3, "o-x--ox--": 7, "-oo-xxo--": 0, "x-x-o-xoo": 3, "oo-xxox--": 2, "xo-xoxoxo": 2, "oxo---oxx": 4, "-ox-xo--o": 6, "xox--o-xo": 6, "o--oxox--": 1, "oxx-oo-ox": 3, "o-xoxo--x": 6, "o------ox": 4, "oxox--xo-": 8, "-o-ox--xo": 2, "xoxxo---o": 6, "o-o-x-xxo": 1, "-xx-oooox": 0, "x--o-x-oo": 6, "xx--oooox": 2, "x-x----oo": 6, "xxooox-xo": 6, "o-oxxoo-x": 1, "o-oxx---o": 5, "-oxoo-x-x": 7, "-ooox-xx-": 8, "xo-oxxoxo": 2, "o-oo-xx-x": 7, "-oooxxxo-": 0, "xoxo--o--": 8, "-oxxxo-o-": 0, "ox-xoo-x-": 8, "---oxo-xo": 2, "xoxxxooo-": 8, "-----o--x": 7, "oo-xooxx-": 8, "-x----oox": 2, "oo-xx--xo": 2, "xo--xxo-o": 3, "--o-xxo-o": 7, "--oo--xx-": 8, "o-oox-xox": 1, "-xo-xxo-o": 7, "o-ooxxxxo": 1, "o-oxx----": 1, "xoxo--o-x": 5, "-oooxxxox": 0, "o-o-ox-xx": 6, "-o-oxxoox": 2, "--x-o-oox": 5, "xox-ooxxo": 3, "o-xx-o-xo": 4, "x--o--xo-": 4, "ox---ooxx": 4, "o-oo-xx--": 1, "--ooxxoxo": 1, 
"--xxoo-xo": 0, "-------ox": 5, "o-ooxoxx-": 1, "-x-x-oxoo": 2, "xo--x--o-": 2, "oxxx-oxoo": 4, "x--o-oxox": 4, "--oox-oxx": 0, "xoxoxo-o-": 8, "-x----oo-": 8, "--ooxox--": 8, "ox--oxxo-": 8, "-----ooxx": 4, "x-oo-x-ox": 4, "-ooxxoo-x": 0, "xo-o-xxoo": 4, "-----x-o-": 4, "xx-xoo-oo": 6, "xo---oox-": 2, "x-o-o-x--": 3, "xooxxo-o-": 6, "oo--xx-ox": 3, "-xoo-x-ox": 6, "ooxx-oo-x": 4, "ox----x-o": 4, "-o-xxoo--": 2, "xoo-xx--o": 3, "xoxo-ooxx": 4, "xooo-xo-x": 4, "-xo-ox-ox": 6, "xo--xo-o-": 8, "oo--x-xo-": 2, "-ox-x-oox": 5, "o--ooxxx-": 8, "--ox-o--x": 7, "ooxxo-oxx": 5, "---ox--o-": 8, "oo-ox-x-x": 7, "-x--oxoox": 2, "oxo-ox--x": 6, "-xxx-ooo-": 0, "--x-oox--": 3, "x---oox-o": 3, "-xxo-ox-o": 0, "o-oxx--xo": 1, "xoxoo---x": 5, "-o-xxoo-x": 0, "xoxo-oox-": 4, "o-x-o-x--": 8, "oo--xx-o-": 3, "-ox---x-o": 0, "xo---ooxx": 4, "x-ooxxxoo": 1, "---xooxxo": 0, "---ooxo-x": 2, "--o-o-x--": 0, "-x------o": 2, "oo-ox-x--": 2, "-o-xoo-xx": 6, "xoxxooo--": 7, "--o-xo---": 8, "o----x---": 2, "-ox-x-oo-": 8, "oo--x-xox": 2, "---ox--ox": 0, "ooxxo-ox-": 8, "x-x-oo--o": 3, "xooox-ox-": 8, "o-o-x-x--": 1, "o-x-xoo--": 3, "--ox-o---": 8, "ooxx-oox-": 4, "-xxx-o-oo": 0, "xoo-----x": 3, "xo-x---o-": 6, "o-xxo-ox-": 8, "ooxxx--oo": 5, "ooxx--o-x": 5, "---x-oxoo": 0, "o---xx-oo": 3, "xo-ox---o": 6, "o--xx---o": 1, "xo-ox-x-o": 2, "xooox-xo-": 8, "--o---oxx": 4, "xox-oxo--": 8, "x-o-xxoo-": 3, "x-xoxoo--": 7, "ooxx---ox": 4, "-o-xoxx-o": 0, "o---xo-ox": 6, "o--oxoxxo": 2, "--x-o--o-": 1, "--x-xooox": 0, "xo-oox-xo": 6, "-xoo-xx-o": 7, "-----xoox": 0, "oxox---ox": 5, "--xo-xxoo": 1, "---xoxxoo": 0, "xx-xooo--": 2, "xoo--xxo-": 4, "xoo------": 3, "-ox--x-o-": 4, "o--oxx-xo": 1, "ooxx-ooxx": 4, "xo-ox----": 6, "---oxxo--": 0, "oo-xo-x-x": 7, "--x-o--ox": 5, "-oxo---ox": 5, "-xoo--xxo": 4, "xx-ox--oo": 2, "ox--x---o": 2, "-x-xo-o--": 2, "x-xooxxoo": 1, "-oo---xx-": 0, "o--oxoxx-": 8, "oxooxx--o": 7, "xooooxx--": 7, "-ox----xo": 4, "x-oo-x-o-": 6, "-xxo---o-": 0, "-ox-x--o-": 8, "xxo---o--": 4, 
"---o-x-o-": 6, "oo--xox-x": 2, "x-o--ooxx": 4, "-o-o-x--x": 0, "o--oxxxo-": 2, "-o--x--o-": 6, "--xx-o-o-": 0, "xx-ooxoo-": 2, "xx-o-oox-": 4, "x-oooxx--": 8, "-o-o-ox-x": 7, "x-oo--oxx": 4, "-xo-o-x--": 0, "xoo---xox": 3, "-x-xooxo-": 0, "oxx----o-": 8, "-x-xo---o": 0, "-o----xxo": 0, "-xo--x-o-": 8, "--xx-ooox": 0, "xx-o--oo-": 8, "oox--x---": 4, "oox--x-xo": 4, "x---oxoxo": 2, "x-xxo-o-o": 1, "-o-o-x---": 6, "x-oxx--oo": 5, "--xxo-oox": 5, "x--o-xo-o": 7, "--o------": 4, "-ox-x--oo": 6, "oxoxxo---": 7, "ox---oxo-": 8, "o-xxxo-oo": 6, "xox---o--": 8, "-o-oxxo--": 0, "---o-x-ox": 4, "-xoxx-oo-": 8, "x-o-xx-oo": 3, "----xo-o-": 6, "-xx--oxoo": 3, "----ooxx-": 8, "x-oooxx-o": 7, "-x-oxox-o": 2, "xo--o-oxx": 2, "ooxo-oxx-": 8, "xx-o--oox": 5, "--x--oox-": 1, "xx-ox-oo-": 2, "-o-o-xx--": 2, "oxoxxoxo-": 8, "x-o--xxoo": 3, "oox--x--o": 4, "-xxox-o-o": 0, "o-oo--x-x": 7, "o-xxooxox": 1, "-xo-o--ox": 6, "--oxxooox": 0, "-x-o-oxo-": 4, "-o-xx-oxo": 5, "--oox-x--": 8, "x-oxoo-x-": 6, "x-o---x-o": 3, "-x-ooxxoo": 0, "--o-oox-x": 3, "x-xo----o": 1, "xox--ooox": 4, "-ooox-x-x": 7, "o-ox-xxo-": 4, "o-xx--oox": 5, "x---x--oo": 6, "xooxoo-xx": 6, "xooo-x--x": 6, "--x---o--": 8, "-o-oxo--x": 2, "-o-xo-oxx": 2, "o-x--o-ox": 4, "-ooxooxx-": 8, "o-oxx-xoo": 5, "-ooox-x--": 0, "ox-oox--x": 2, "oox-xxo--": 8, "oo--oxxx-": 8, "x-xo-o-ox": 4, "--oox-x-o": 5, "--o-xxo--": 3, "-ox-o--x-": 8, "o-x-x-oox": 5, "xo--xo---": 3, "x-x--ooo-": 1, "xo---oxxo": 3, "oox-x--o-": 6, "o-o-x-oxx": 1, "oo--x----": 2, "o-o-xox-x": 7, "---oo-x-x": 7, "xoooo-x-x": 7, "-----ox--": 4, "-oooxx-xo": 0, "-xoo--x--": 7, "x-o------": 6, "o---oxoxx": 2, "xoo-xxxoo": 3, "-ox-xoo-x": 0, "xooo-x---": 7, "xooxoo-x-": 6, "--oxo---x": 6, "o-oxx-xo-": 5, "xooo-x-x-": 8, "xxo---oox": 4, "o-o-xxxo-": 3, "ooxoxo-xx": 6, "xo-oox---": 7, "oxoox-x--": 7, "--x-oooxx": 3, "o-oxxoox-": 1, "-ox-xoo--": 0, "ooxx-o--x": 6, "xoxoo-oxx": 5, "oox-xoo-x": 3, "--xx-ooo-": 8, "xoxo---xo": 4, "oxoxx---o": 7, "xxo--o-ox": 6, "-oxoo--x-": 5, 
"ox-o-xxo-": 2, "-ooxx--xo": 5, "xoxo-xoxo": 4, "o---o-x--": 8, "-oxoox-xo": 0, "--o-xox--": 8, "-o--xx-oo": 6, "o---oox-x": 7, "--xxoooox": 1, "xo--ooxx-": 8, "xo--ooxxo": 3, "xxoo-xxoo": 4, "-xoxxooox": 0, "-oxx--xoo": 0, "-xooxxo--": 7, "o-oxxooxx": 1, "o-o-xxxoo": 3, "-oxoxx--o": 6, "xooo-o-xx": 6, "ooxox----": 6, "-oox-xx-o": 4, "xo-oox--x": 2, "x--oxo-o-": 2, "x-oxo--xo": 6, "oxo-x-oox": 3, "-xo-oo-xx": 6, "ox-o----x": 6, "xooooxx-x": 7, "xoo-x-x-o": 3, "--ooxoxox": 0, "o---o-x-x": 2, "xx-oo-x-o": 2, "xoxxooo-x": 7, "oo-x-x-ox": 2, "o--x--ox-": 4, "-xxoo-x-o": 0, "-xoo-oxx-": 4, "--ooxxx-o": 7, "xo---x--o": 3} class EnforcedTimeExecption(Exception): pass def EnforcedTimeHandler(signum, frame): raise EnforcedTimeExecption() class Player61(object): def __init__(self): self.number_of_moves = 0 self.player_symbol = None self.opponent_symbol = None self.actual_board = [[]] self.status_board = [] self.backup_status_board = [] self.transposition_table = {} self.heuristic_minimax_table = heuristic_table self.is_max_player = True def make_board_str(self): string = "" for i in xrange(0,9): for j in xrange(0,9): string += self.actual_board[i][j] return string def make_block_str(self,board,block_number): x,y = self.get_block_coords(block_number) string = "" for i in xrange(x,x+3): for j in xrange(y,y+3): string += board[i][j] return string def get_block_coords(self,block_number): return { 0 : (0, 0), 1 : (0, 3), 2 : (0, 6), 3 : (3, 0), 4 : (3, 3), 5 : (3, 6), 6 : (6, 0), 7 : (6, 3), 8 : (6, 6), }.get(block_number) def get_status_of_block(self,block_number,current_block,our_symbol): has_completed = True first_win=0 x,y = self.get_block_coords(block_number) our_symbol = self.player_symbol other_symbol = self.opponent_symbol for i in xrange(x,x+3): for j in xrange(y,y+3): if not (current_block[i][j] == other_symbol or current_block[i][j] == our_symbol): has_completed = False if current_block[x][y] == our_symbol and current_block[x + 1][y] == our_symbol and current_block[x + 
2][y] == our_symbol: if first_win==0: first_win=1 elif current_block[x][y + 1] == our_symbol and current_block[x + 1][y + 1] == our_symbol and current_block[x + 2][y + 1] == our_symbol: if first_win==0: first_win=1 elif current_block[x][y + 2] == our_symbol and current_block[x + 1][y + 2] == our_symbol and current_block[x + 2][y + 2] == our_symbol: if first_win==0: first_win=1 elif current_block[x][y] == our_symbol and current_block[x][y + 1] == our_symbol and current_block[x][y + 2] == our_symbol: if first_win==0: first_win=1 elif current_block[x + 1][y] == our_symbol and current_block[x + 1][y + 1] == our_symbol and current_block[x + 1][y + 2] == our_symbol: if first_win==0: first_win=1 elif current_block[x + 2][y] == our_symbol and current_block[x + 2][y + 1] == our_symbol and current_block[x + 2][y + 2] == our_symbol: if first_win==0: first_win=1 elif current_block[x][y] == our_symbol and current_block[x + 1][y + 1] == our_symbol and current_block[x + 2][y + 2] == our_symbol: if first_win==0: first_win=1 elif current_block[x + 2][y] == our_symbol and current_block[x + 1][y + 1] == our_symbol and current_block[x][y + 2] == our_symbol: if first_win==0: first_win=1 if current_block[x][y] == other_symbol and current_block[x + 1][y] == other_symbol and current_block[x + 2][y] == other_symbol: if first_win==0: first_win=-1 elif current_block[x][y + 1] == other_symbol and current_block[x + 1][y + 1] == other_symbol and current_block[x + 2][y + 1] == other_symbol: if first_win==0: first_win=-1 elif current_block[x][y + 2] == other_symbol and current_block[x + 1][y + 2] == other_symbol and current_block[x + 2][y + 2] == other_symbol: if first_win==0: first_win=-1 elif current_block[x][y] == other_symbol and current_block[x][y + 1] == other_symbol and current_block[x][y + 2] == other_symbol: if first_win==0: first_win=-1 elif current_block[x + 1][y] == other_symbol and current_block[x + 1][y + 1] == other_symbol and current_block[x + 1][y + 2] == other_symbol: if 
first_win==0: first_win=-1 elif current_block[x + 2][y] == other_symbol and current_block[x + 2][y + 1] == other_symbol and current_block[x + 2][y + 2] == other_symbol: if first_win==0: first_win=-1 elif current_block[x][y] == other_symbol and current_block[x + 1][y + 1] == other_symbol and current_block[x + 2][y + 2] == other_symbol: if first_win==0: first_win=-1 elif current_block[x + 2][y] == other_symbol and current_block[x + 1][y + 1] == other_symbol and current_block[x][y + 2] == other_symbol: if first_win==0: first_win=-1 return (has_completed,first_win) def get_permitted_blocks(self,old_move): for_corner = [ 0, 2, 3, 5, 6, 8 ] blocks_allowed = [] if old_move[0] in for_corner and old_move[1] in for_corner: if old_move[0] % 3 == 0 and old_move[1] % 3 == 0: blocks_allowed = [0, 1, 3] elif old_move[0] % 3 == 0 and old_move[1] in [2, 5, 8]: blocks_allowed = [1,2,5] elif old_move[0] in [2,5, 8] and old_move[1] % 3 == 0: blocks_allowed = [3,6,7] elif old_move[0] in [2,5,8] and old_move[1] in [2,5,8]: blocks_allowed = [5,7,8] else: if old_move[0] % 3 == 0 and old_move[1] in [1,4,7]: blocks_allowed = [1] elif old_move[0] in [1,4,7] and old_move[1] % 3 == 0: blocks_allowed = [3] elif old_move[0] in [2,5,8] and old_move[1] in [1,4,7]: blocks_allowed = [7] elif old_move[0] in [1,4,7] and old_move[1] in [2,5,8]: blocks_allowed = [5] elif old_move[0] in [1,4,7] and old_move[1] in [1,4,7]: blocks_allowed = [4] for i in reversed(blocks_allowed): if self.status_board[i] != '-': blocks_allowed.remove(i) return blocks_allowed def get_empty_out_of(self,blal): cells = [] for idb in blal: id1 = idb/3 id2 = idb%3 for i in range(id1*3,id1*3+3): for j in range(id2*3,id2*3+3): if self.actual_board[i][j] == '-': cells.append((i,j)) if cells == []: for i in range(9): for j in range(9): no = (i/3)*3 no += (j/3) if self.actual_board[i][j] == '-' and self.status_board[no] == '-': cells.append((i,j)) return cells def game_completed(self,current_board,our_symbol): q = [0 for x in 
xrange(0,9)] w = [0 for x in xrange(0,9)] j=0 for i in xrange(0,9): q[i],w[i]=self.get_status_of_block(i,current_board,our_symbol) for i in xrange(0,9): if q[i]==True or w[i]!=0: j += 1 if w[1]+w[2]+w[0]==3 or w[3]+w[4]+w[5]==3 or w[6]+w[7]+w[8]==3 or w[0]+w[3]+w[6]==3 or w[1]+w[4]+w[7]==3 or w[2]+w[5]+w[8]==3 or w[0]+w[5]+w[8]==3 or w[2]+w[5]+w[7]==3: return (j,10) elif w[1]+w[2]+w[0]==-3 or w[3]+w[4]+w[5]==-3 or w[6]+w[7]+w[8]==-3 or w[0]+w[3]+w[6]==-3 or w[1]+w[4]+w[7]==-3 or w[2]+w[5]+w[8]==-3 or w[0]+w[5]+w[8]==-3 or w[2]+w[5]+w[7]==-3: return (j,-10) else: return (j,0) def get_board_status(self): return self.get_status_block(0, self.status_board, self.player_symbol) def bind_symbol(self,our_symbol): self.player_symbol = our_symbol self.opponent_symbol = 'x' if self.player_symbol == self.opponent_symbol: self.opponent_symbol = 'o' def get_move_from_number(self,block_number,move_number): x,y = self.get_block_coords(block_number) a,b = self.get_block_coords(move_number) # Just got very lazy there. 
:) return ((x + (a/3)), (y + (b/3))) def copy_current_board_elems(self,current_board,board_stat): self.actual_board = current_board[:] self.status_board = board_stat[:] def return_random_move(self,possible_moves): return random.choice(possible_moves) def make_minimax_saved_move(self,current_board,blocks_allowed,cells): acc_moves = [] for block_number in blocks_allowed: string = self.make_block_str(current_board,block_number) try: move_number = self.heuristic_minimax_table[string] cell = self.get_move_from_number(block_number,move_number) if cell in cells: acc_moves.append(cell) except: pass try: return random.choice(acc_moves) except: return random.choice(cells) def reverse_board_status(self): self.status_board = self.backup_status_board[:] def heuristic_score(self,board): winnable_x = [8,8,8,8,8,8,8,8,8] lines_x = [[1,1,1,1,1,1,1,1],[1,1,1,1,1,1,1,1],[1,1,1,1,1,1,1,1],[1,1,1,1,1,1,1,1], [1,1,1,1,1,1,1,1],[1,1,1,1,1,1,1,1],[1,1,1,1,1,1,1,1],[1,1,1,1,1,1,1,1],[1,1,1,1,1,1,1,1]] winnable_o = [8,8,8,8,8,8,8,8,8] lines_o = [[1,1,1,1,1,1,1,1],[1,1,1,1,1,1,1,1],[1,1,1,1,1,1,1,1],[1,1,1,1,1,1,1,1], [1,1,1,1,1,1,1,1],[1,1,1,1,1,1,1,1],[1,1,1,1,1,1,1,1],[1,1,1,1,1,1,1,1],[1,1,1,1,1,1,1,1]] for index in xrange(9): block_coords = self.get_block_coords(index) x = block_coords[0] y = block_coords[1] if board[x][y] == 'x': if lines_o[0][0] == 1: lines_o[0][0] = 0 winnable_o[index] -= 1 if lines_o[0][3] == 1: lines_o[0][3] = 0 winnable_o[index] -= 1 if lines_o[0][6] == 1: lines_o[0][6] = 0 winnable_o[index] -= 1 elif board[x][y] == 'o': if lines_x[0][0] == 1: lines_x[0][0] = 0 winnable_x[index] -= 1 if lines_x[0][3] == 1: lines_x[0][3] = 0 winnable_x[index] -= 1 if lines_x[0][6] == 1: lines_x[0][6] = 0 winnable_x[index] -= 1 if board[x+2][y] == 'x': if lines_o[0][0] == 1: lines_o[0][0] = 0 winnable_o[index] -= 1 if lines_o[0][5] == 1: lines_o[0][5] = 0 winnable_o[index] -= 1 if lines_o[0][7] == 1: lines_o[0][7] = 0 winnable_o[index] -= 1 elif board[x+2][y] == 'o': if 
lines_x[0][0] == 1: lines_x[0][0] = 0 winnable_x[index] -= 1 if lines_x[0][5] == 1: lines_x[0][5] = 0 winnable_x[index] -= 1 if lines_x[0][7] == 1: lines_x[0][7] = 0 winnable_x[index] -= 1 if board[x][y+2] == 'x': if lines_o[0][2] == 1: lines_o[0][2] = 0 winnable_o[index] -= 1 if lines_o[0][3] == 1: lines_o[0][3] = 0 winnable_o[index] -= 1 if lines_o[0][7] == 1: lines_o[0][7] = 0 winnable_o[index] -= 1 elif board[x][y+2] == 'o': if lines_x[0][2] == 1: lines_x[0][2] = 0 winnable_x[index] -= 1 if lines_x[0][3] == 1: lines_x[0][3] = 0 winnable_x[index] -= 1 if lines_x[0][7] == 1: lines_x[0][7] = 0 winnable_x[index] -= 1 if board[x+2][y+2] == 'x': if lines_o[0][2] == 1: lines_o[0][2] = 0 winnable_o[index] -= 1 if lines_o[0][5] == 1: lines_o[0][5] = 0 winnable_o[index] -= 1 if lines_o[0][6] == 1: lines_o[0][6] = 0 winnable_o[index] -= 1 elif board[x+2][y+2] == 'o': if lines_x[0][2] == 1: lines_x[0][2] = 0 winnable_x[index] -= 1 if lines_x[0][5] == 1: lines_x[0][5] = 0 winnable_x[index] -= 1 if lines_x[0][6] == 1: lines_x[0][6] = 0 winnable_x[index] -= 1 if board[x+1][y] == 'x': if lines_o[0][0] == 1: lines_o[0][0] = 0 winnable_o[index] -= 1 if lines_o[0][4] == 1: lines_o[0][4] = 0 winnable_o[index] -= 1 elif board[x+1][y] == 'o': if lines_x[0][0] == 1: lines_x[0][0] = 0 winnable_x[index] -= 1 if lines_x[0][4] == 1: lines_x[0][4] = 0 winnable_x[index] -= 1 if board[x][y+1] == 'x': if lines_o[0][1] == 1: lines_o[0][1] = 0 winnable_o[index] -= 1 if lines_o[0][3] == 1: lines_o[0][3] = 0 winnable_o[index] -= 1 elif board[x][y+1] == 'o': if lines_x[0][1] == 1: lines_x[0][1] = 0 winnable_x[index] -= 1 if lines_x[0][3] == 1: lines_x[0][3] = 0 winnable_x[index] -= 1 if board[x+2][y+1] == 'x': if lines_o[0][1] == 1: lines_o[0][1] = 0 winnable_o[index] -= 1 if lines_o[0][5] == 1: lines_o[0][5] = 0 winnable_o[index] -= 1 elif board[x+2][y+1] == 'o': if lines_x[0][1] == 1: lines_x[0][1] = 0 winnable_x[index] -= 1 if lines_x[0][5] == 1: lines_x[0][5] = 0 winnable_x[index] -= 1 if 
board[x+1][y+2] == 'x': if lines_o[0][2] == 1: lines_o[0][2] = 0 winnable_o[index] -= 1 if lines_o[0][4] == 1: lines_o[0][4] = 0 winnable_o[index] -= 1 elif board[x+1][y+2] == 'o': if lines_x[0][2] == 1: lines_x[0][2] = 0 winnable_x[index] -= 1 if lines_x[0][4] == 1: lines_x[0][4] = 0 winnable_x[index] -= 1 #Center if board[x+1][y+1] == 'x': if lines_o[0][1] == 1: lines_o[0][1] = 0 winnable_o[index] -= 1 if lines_o[0][4] == 1: lines_o[0][4] = 0 winnable_o[index] -= 1 if lines_o[0][6] == 1: lines_o[0][6] = 0 winnable_o[index] -= 1 if lines_o[0][7] == 1: lines_o[0][7] = 0 winnable_o[index] -= 1 elif board[x+1][y+1] == 'o': if lines_x[0][1] == 1: lines_x[0][1] = 0 winnable_x[index] -= 1 if lines_x[0][4] == 1: lines_x[0][4] = 0 winnable_x[index] -= 1 if lines_x[0][6] == 1: lines_x[0][6] = 0 winnable_x[index] -= 1 if lines_x[0][7] == 1: lines_x[0][7] = 0 winnable_x[index] -= 1 h_list = [] for index in xrange(9): h_list.append( winnable_x[index] - winnable_o[index] ) winnable_X = 8 #Winnable lines for X on bigger board winnable_O = 8 #Winnable lines for O on bigger board for index in xrange(9): if index in [0,2,6,8]: if h_list[index] > 0: winnable_O -= 3 elif h_list[index] < 0: winnable_X -= 3 elif index in [1,3,5,7]: if h_list[index] > 0: winnable_O -= 2 elif h_list[index] < 0: winnable_X -= 2 else: if h_list[index] > 0: winnable_O -= 4 elif h_list[index] < 0: winnable_X -= 4 H = winnable_X - winnable_O return H def update_and_save_board_status(self,move_ret,symbol): self.backup_status_board = self.status_board[:] block_no = (move_ret[0]/3)*3 + (move_ret[1])/3 id1 = block_no/3 id2 = block_no%3 mg = 0 mflg = 0 if self.status_board[block_no] == '-': if self.actual_board[id1*3][id2*3] == self.actual_board[id1*3+1][id2*3+1] and self.actual_board[id1*3+1][id2*3+1] == self.actual_board[id1*3+2][id2*3+2] and self.actual_board[id1*3+1][id2*3+1] != '-': mflg=1 if self.actual_board[id1*3+2][id2*3] == self.actual_board[id1*3+1][id2*3+1] and self.actual_board[id1*3+1][id2*3+1] == 
self.actual_board[id1*3][id2*3 + 2] and self.actual_board[id1*3+1][id2*3+1] != '-': mflg=1 if mflg != 1: for i in range(id2*3,id2*3+3): if self.actual_board[id1*3][i]==self.actual_board[id1*3+1][i] and self.actual_board[id1*3+1][i] == self.actual_board[id1*3+2][i] and self.actual_board[id1*3][i] != '-': mflg = 1 break if mflg != 1: for i in range(id1*3,id1*3+3): if self.actual_board[i][id2*3]==self.actual_board[i][id2*3+1] and self.actual_board[i][id2*3+1] == self.actual_board[i][id2*3+2] and self.actual_board[i][id2*3] != '-': mflg = 1 break if mflg == 1: self.status_board[block_no] = symbol id1 = block_no/3 id2 = block_no%3 cells = [] for i in range(id1*3,id1*3+3): for j in range(id2*3,id2*3+3): if self.actual_board[i][j] == '-': cells.append((i,j)) if cells == [] and mflg != 1: self.status_board[block_no] = 'd' def _get_symbol_from_is_maximizing_player(self, is_maximizing_player): if is_maximizing_player: return self.player_symbol else: return self.opponent_symbol def perform_heuristic(self,cell): x,y = cell self.actual_board[x][y] = self._get_symbol_from_is_maximizing_player(self.is_max_player) rv = self.heuristic_score(self.actual_board) self.actual_board[x][y] = "-" return rv def negamax_alpha_beta_transposition_table(self, opponent_move, depth, alpha, beta, is_maximizing_player): self.is_max_player = is_maximizing_player alpha_orig = alpha blocks_allowed = self.get_permitted_blocks(opponent_move) cells = self.get_empty_out_of(blocks_allowed) if not cells: if is_maximizing_player: return (None, -99999) else: return (None, 99999) board_str = self.make_board_str() try: tt_depth,tt_flag,tt_value,tt_cell = self.transposition_table[board_str] if tt_depth >= depth: if tt_flag == 0: return (tt_cell,tt_value) elif tt_flag == -1: alpha = max(alpha,tt_value) elif tt_flag == 1: beta = min(beta,tt_value) if alpha >= beta: return (tt_cell,tt_value) except: pass game_status, game_score = self.game_completed(self.actual_board, 
self._get_symbol_from_is_maximizing_player(is_maximizing_player)) if depth == 0 and is_maximizing_player: return (None, self.heuristic_score(self.actual_board)) if depth == 0 and not is_maximizing_player: return (None, -self.heuristic_score(self.actual_board)) elif game_status == 9: return (None, game_score) if is_maximizing_player: v = -99999 else: v = 99999 cells.sort(key=self.perform_heuristic) selected_cell = cells[0] for cell in cells: x,y = cell self.actual_board[x][y] = self._get_symbol_from_is_maximizing_player(is_maximizing_player) self.update_and_save_board_status(cell, self._get_symbol_from_is_maximizing_player(is_maximizing_player)) child_node_values = self.negamax_alpha_beta_transposition_table(cell, depth - 1, -beta, -alpha, (not is_maximizing_player)) self.actual_board[x][y] = '-' self.reverse_board_status() old_v = v v = max(v, -1*child_node_values[1]) if v != old_v: selected_cell = cell alpha = max(alpha,v) if beta <= alpha: break new_entry_value = v if new_entry_value <= alpha_orig: new_entry_flag = 1 elif new_entry_value >= beta: new_entry_flag = -1 else: new_entry_flag = 0 new_entry_depth = depth self.transposition_table[board_str] = (new_entry_depth,new_entry_flag,new_entry_value,selected_cell) return (selected_cell, v) def move(self,current_board,board_stat,opponent_move,our_symbol): self.bind_symbol(our_symbol) self.copy_current_board_elems(current_board,board_stat) self.number_of_moves = self.number_of_moves + 1 blocks_allowed = self.get_permitted_blocks(opponent_move) cells = self.get_empty_out_of(blocks_allowed) if self.number_of_moves < 8: depth = 3 elif self.number_of_moves < 16: depth = 5 else: depth = 7 signal.signal(signal.SIGALRM, EnforcedTimeHandler) signal.alarm(ENFORCED_TIME) try: move, value = self.negamax_alpha_beta_transposition_table(opponent_move, depth, -99999, 99999, True) except EnforcedTimeExecption: move = self.make_minimax_saved_move(current_board,blocks_allowed,cells) signal.alarm(0) if move not in cells: move = 
random.choice(cells) print self.player_symbol return move
135.417122
50,547
0.427109
14,535
74,344
2.135673
0.022635
0.02139
0.01353
0.017782
0.341505
0.226886
0.198827
0.1785
0.173539
0.168836
0
0.071839
0.210979
74,344
548
50,548
135.664234
0.457355
0.001426
0
0.489443
0
0
0.386171
0
0
0
0
0
0
0
null
null
0.005758
0.007678
null
null
0.001919
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
1
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
4
e6fd0bc7021a4a7ae71d07914a2f31e34dc9ca83
126
py
Python
cmapfile/__main__.py
cgohlke/cmapfile
0daf3b657b0b7eea2c3293f50e01ac9c65d59d0d
[ "BSD-3-Clause" ]
1
2020-02-26T17:16:03.000Z
2020-02-26T17:16:03.000Z
cmapfile/__main__.py
cgohlke/cmapfile
0daf3b657b0b7eea2c3293f50e01ac9c65d59d0d
[ "BSD-3-Clause" ]
null
null
null
cmapfile/__main__.py
cgohlke/cmapfile
0daf3b657b0b7eea2c3293f50e01ac9c65d59d0d
[ "BSD-3-Clause" ]
null
null
null
# cmapfile/__main__.py """Cmapfile package command line script.""" import sys from .cmapfile import main sys.exit(main())
12.6
43
0.730159
17
126
5.176471
0.647059
0
0
0
0
0
0
0
0
0
0
0
0.142857
126
9
44
14
0.814815
0.468254
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
fc00904e771d29ad0fdaef094c014ef47fde46bc
89
py
Python
plotting/custom_styles.py
woojiahao/pds-analysis
d84c8353b7f7323d673c530e0d414d87f80d5384
[ "MIT" ]
4
2018-08-10T13:56:58.000Z
2020-04-09T13:32:08.000Z
plotting/custom_styles.py
woojiahao/pds-analysis
d84c8353b7f7323d673c530e0d414d87f80d5384
[ "MIT" ]
null
null
null
plotting/custom_styles.py
woojiahao/pds-analysis
d84c8353b7f7323d673c530e0d414d87f80d5384
[ "MIT" ]
null
null
null
from pygal.style import Style style = Style( font_family='googlefont:Source+Code+Pro')
17.8
42
0.775281
13
89
5.230769
0.769231
0.294118
0
0
0
0
0
0
0
0
0
0
0.11236
89
4
43
22.25
0.860759
0
0
0
0
0
0.292135
0.292135
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
fc15c2806479b8cec5d97cecfc698a2d07a30776
233
py
Python
chap 4/dimensions.py
wikilike7/python-crash-course
85cd7a2ab6e43a554c282b6e0c1c44c415cca3a3
[ "MIT" ]
null
null
null
chap 4/dimensions.py
wikilike7/python-crash-course
85cd7a2ab6e43a554c282b6e0c1c44c415cca3a3
[ "MIT" ]
null
null
null
chap 4/dimensions.py
wikilike7/python-crash-course
85cd7a2ab6e43a554c282b6e0c1c44c415cca3a3
[ "MIT" ]
1
2019-03-05T09:31:27.000Z
2019-03-05T09:31:27.000Z
dimensions = (200, 50) print(dimensions[0]) print(dimensions[1]) # dimensions[0] = 100 # print(dimensions) for dimension in dimensions: print(dimension) dimensions = [100, 300] for dimension in dimensions: print(dimension)
17.923077
28
0.72103
29
233
5.793103
0.37931
0.267857
0.166667
0.285714
0.452381
0.452381
0
0
0
0
0
0.086294
0.154506
233
13
29
17.923077
0.766497
0.158798
0
0.5
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
1
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
4
fc23b73fbc53178eb143b167a3dfb52b02878f63
18,276
py
Python
ID18_U18_ONE_LENS/coherent_fraction_vs_slit_aperture.py
srio/paper-transfocators-resources
917d8b4114056f62c84b295579e55bf5f0b56b6b
[ "MIT" ]
1
2021-03-25T15:34:56.000Z
2021-03-25T15:34:56.000Z
ID18_U18_ONE_LENS/coherent_fraction_vs_slit_aperture.py
srio/paper-transfocators-resources
917d8b4114056f62c84b295579e55bf5f0b56b6b
[ "MIT" ]
null
null
null
ID18_U18_ONE_LENS/coherent_fraction_vs_slit_aperture.py
srio/paper-transfocators-resources
917d8b4114056f62c84b295579e55bf5f0b56b6b
[ "MIT" ]
null
null
null
# # Import section # import numpy from syned.beamline.beamline_element import BeamlineElement from syned.beamline.element_coordinates import ElementCoordinates from wofry.propagator.propagator import PropagationManager, PropagationElements, PropagationParameters from wofry.propagator.wavefront1D.generic_wavefront import GenericWavefront1D from wofryimpl.propagator.propagators1D.fresnel import Fresnel1D from wofryimpl.propagator.propagators1D.fresnel_convolution import FresnelConvolution1D from wofryimpl.propagator.propagators1D.fraunhofer import Fraunhofer1D from wofryimpl.propagator.propagators1D.integral import Integral1D from wofryimpl.propagator.propagators1D.fresnel_zoom import FresnelZoom1D from wofryimpl.propagator.propagators1D.fresnel_zoom_scaling_theorem import FresnelZoomScaling1D # # SOURCE======================== # def run_source_und_h(my_mode_index=0, energy_in_keV=7): global coherent_mode_decomposition_h try: tmp = coherent_mode_decomposition_h except: ########## SOURCE ########## if energy_in_keV == 7: K = 1.85108 elif energy_in_keV == 15: K = 0.729628 elif energy_in_keV == 30: K = 1.341095 else: raise Exception("Please provide K value") # # create output_wavefront # # from wofryimpl.propagator.util.undulator_coherent_mode_decomposition_1d import UndulatorCoherentModeDecomposition1D coherent_mode_decomposition_h = UndulatorCoherentModeDecomposition1D( electron_energy=6, electron_current=0.2, undulator_period=0.018, undulator_nperiods=138, K=K, photon_energy=1e3 * energy_in_keV, abscissas_interval=0.00025, number_of_points=800, distance_to_screen=100, scan_direction='H', sigmaxx=2.97321e-05, sigmaxpxp=4.37237e-06, useGSMapproximation=False,) # make calculation coherent_mode_decomposition_results = coherent_mode_decomposition_h.calculate() mode_index = 0 output_wavefront = coherent_mode_decomposition_h.get_eigenvector_wavefront(mode_index) output_wavefront = coherent_mode_decomposition_h.get_eigenvector_wavefront(my_mode_index) return output_wavefront 
def run_source_und_v(my_mode_index=0, energy_in_keV=7): global coherent_mode_decomposition_v try: tmp = coherent_mode_decomposition_v except: if energy_in_keV == 7: K = 1.85108 elif energy_in_keV == 15: K = 0.729628 elif energy_in_keV == 30: K = 1.341095 else: raise Exception("Please provide K value") ########## SOURCE ########## # # create output_wavefront # # from wofryimpl.propagator.util.undulator_coherent_mode_decomposition_1d import \ UndulatorCoherentModeDecomposition1D coherent_mode_decomposition_v = UndulatorCoherentModeDecomposition1D( electron_energy=6, electron_current=0.2, undulator_period=0.018, undulator_nperiods=138, K=K, photon_energy=1e3 * energy_in_keV, abscissas_interval=0.00025, number_of_points=800, distance_to_screen=100, scan_direction='V', sigmaxx=5.2915e-06, sigmaxpxp=1.88982e-06, useGSMapproximation=False, ) # make calculation coherent_mode_decomposition_results = coherent_mode_decomposition_v.calculate() mode_index = 0 output_wavefront = coherent_mode_decomposition_v.get_eigenvector_wavefront(mode_index) output_wavefront = coherent_mode_decomposition_v.get_eigenvector_wavefront(my_mode_index) return output_wavefront def run_source_gsm_h(my_mode_index=0, energy_in_keV=7): ########## SOURCE ########## # # create output_wavefront # # if energy_in_keV == 7: sigma_x=3.00818e-05 beta = 0.129748 elif energy_in_keV == 15: sigma_x = 2.98958e-05 beta = 0.0729639 elif energy_in_keV == 30: sigma_x = 2.98141e-05 beta = 0.0409362 else: raise Exception("Please provide sigma_x and beta values") output_wavefront = GenericWavefront1D.initialize_wavefront_from_range(x_min=-0.00012, x_max=0.00012, number_of_points=1000) output_wavefront.set_photon_energy(1e3 * energy_in_keV) output_wavefront.set_gaussian_hermite_mode(sigma_x=sigma_x, amplitude=1, mode_x=0, shift=0, beta=beta) # previous command is useless but... 
output_wavefront.set_gaussian_hermite_mode(sigma_x=sigma_x, amplitude=1, mode_x=my_mode_index, shift=0, beta=beta) return output_wavefront # # SOURCE======================== # def run_source_gsm_v(my_mode_index=0, energy_in_keV=7): ########## SOURCE ########## # # create output_wavefront # # if energy_in_keV == 7: sigma_x=6.99408e-06 beta=1.01172 elif energy_in_keV == 15: sigma_x = 6.14502e-06 beta = 0.624601 elif energy_in_keV == 30: sigma_x = 5.73417e-06 beta = 0.387842 else: raise Exception("Please provide sigma_x and beta values") output_wavefront = GenericWavefront1D.initialize_wavefront_from_range(x_min=-5e-05, x_max=5e-05, number_of_points=1000) output_wavefront.set_photon_energy(1e3 * energy_in_keV) # output_wavefront.set_gaussian_hermite_mode(sigma_x=5.84299e-06, amplitude=1, mode_x=0, shift=0, beta=1.56094) output_wavefront.set_gaussian_hermite_mode(sigma_x=sigma_x, amplitude=1, mode_x=0, shift=0, beta=beta) # previous command is useless but... output_wavefront.set_gaussian_hermite_mode(sigma_x=sigma_x, amplitude=1, mode_x=my_mode_index, shift=0, beta=beta) return output_wavefront # # BEAMLINE======================== # def run_beamline_h(output_wavefront,slit=50e-6, gaussian_slit=True): ########## OPTICAL SYSTEM ########## ########## OPTICAL ELEMENT NUMBER 1 ########## input_wavefront = output_wavefront.duplicate() from wofryimpl.beamline.optical_elements.ideal_elements.screen import WOScreen1D optical_element = WOScreen1D() # drift_before 35 m # # propagating # # propagation_elements = PropagationElements() beamline_element = BeamlineElement(optical_element=optical_element, coordinates=ElementCoordinates(p=36.000000, q=0.000000, angle_radial=numpy.radians(0.000000), angle_azimuthal=numpy.radians(0.000000))) propagation_elements.add_beamline_element(beamline_element) propagation_parameters = PropagationParameters(wavefront=input_wavefront, propagation_elements=propagation_elements) # self.set_additional_parameters(propagation_parameters) # 
propagation_parameters.set_additional_parameters('magnification_x', 8.0) # propagator = PropagationManager.Instance() try: propagator.add_propagator(FresnelZoom1D()) except: pass output_wavefront = propagator.do_propagation(propagation_parameters=propagation_parameters, handler_name='FRESNEL_ZOOM_1D') ########## OPTICAL ELEMENT NUMBER 2 ########## input_wavefront = output_wavefront.duplicate() from syned.beamline.shape import Rectangle boundary_shape = Rectangle(-slit/2, slit/2, -slit/2, slit/2) from wofryimpl.beamline.optical_elements.absorbers.slit import WOGaussianSlit1D, WOSlit1D if gaussian_slit: optical_element = WOGaussianSlit1D(boundary_shape=boundary_shape) else: optical_element = WOSlit1D(boundary_shape=boundary_shape) # no drift in this element output_wavefront = optical_element.applyOpticalElement(input_wavefront) return output_wavefront # # BEAMLINE======================== # def run_beamline_v(output_wavefront,slit=50e-6, gaussian_slit=True): ########## OPTICAL SYSTEM ########## ########## OPTICAL ELEMENT NUMBER 1 ########## input_wavefront = output_wavefront.duplicate() from wofryimpl.beamline.optical_elements.ideal_elements.screen import WOScreen1D optical_element = WOScreen1D() # drift_before 36 m # # propagating # # propagation_elements = PropagationElements() beamline_element = BeamlineElement(optical_element=optical_element, coordinates=ElementCoordinates(p=36.000000, q=0.000000, angle_radial=numpy.radians(0.000000), angle_azimuthal=numpy.radians(0.000000))) propagation_elements.add_beamline_element(beamline_element) propagation_parameters = PropagationParameters(wavefront=input_wavefront, propagation_elements=propagation_elements) # self.set_additional_parameters(propagation_parameters) # propagation_parameters.set_additional_parameters('magnification_x', 10.0) # propagator = PropagationManager.Instance() try: propagator.add_propagator(FresnelZoom1D()) except: pass output_wavefront = 
propagator.do_propagation(propagation_parameters=propagation_parameters, handler_name='FRESNEL_ZOOM_1D') ########## OPTICAL ELEMENT NUMBER 2 ########## input_wavefront = output_wavefront.duplicate() from syned.beamline.shape import Rectangle boundary_shape = Rectangle(-slit/2, slit/2, -slit/2, slit/2) from wofryimpl.beamline.optical_elements.absorbers.slit import WOGaussianSlit1D, WOSlit1D if gaussian_slit: optical_element = WOGaussianSlit1D(boundary_shape=boundary_shape) else: optical_element = WOSlit1D(boundary_shape=boundary_shape) # no drift in this element output_wavefront = optical_element.applyOpticalElement(input_wavefront) return output_wavefront # # MAIN FUNCTION======================== # def main_h(energy_in_keV=7, source_gsm=True, slit=50e-6,gaussian_slit=False): from orangecontrib.esrf.wofry.util.tally import TallyCoherentModes tally = TallyCoherentModes() for my_mode_index in range(50): if source_gsm: output_wavefront = run_source_gsm_h(energy_in_keV=energy_in_keV, my_mode_index=my_mode_index) else: output_wavefront = run_source_und_h(energy_in_keV=energy_in_keV, my_mode_index=my_mode_index) output_wavefront = run_beamline_h(output_wavefront, slit=slit, gaussian_slit=gaussian_slit) tally.append(output_wavefront) return tally # tally.plot_cross_spectral_density() # tally.plot_spectral_density() # tally.plot_occupation() def main_v(energy_in_keV=7, source_gsm=True, slit=50e-6,gaussian_slit=False): from orangecontrib.esrf.wofry.util.tally import TallyCoherentModes tally = TallyCoherentModes() for my_mode_index in range(50): if source_gsm: output_wavefront = run_source_gsm_v(energy_in_keV=energy_in_keV, my_mode_index=my_mode_index) else: output_wavefront = run_source_und_v(energy_in_keV=energy_in_keV, my_mode_index=my_mode_index) output_wavefront = run_beamline_v(output_wavefront, slit=slit, gaussian_slit=gaussian_slit) tally.append(output_wavefront) return tally # tally.plot_cross_spectral_density() # tally.plot_spectral_density() # 
tally.plot_occupation() # # MAIN======================== # if __name__ == "__main__": import matplotlib.pylab as plt from srxraylib.plot.gol import plot, plot_show, set_qt set_qt() do_calculate = False do_plots = True energy_in_keV = 7 # 30 # 15 # 7 sources = ["UND"] #, "GSM"] apertures = ["RECTANGULAR"] #,"GAUSSIAN"] subdirectory = "DataCF%d" % energy_in_keV # # # if do_calculate: for aperture in apertures: for source in sources: outfile = "%s/coherent_fraction_vs_slit_source_%s_aperture_%s.dat" % (subdirectory, source, aperture) slits = numpy.concatenate((numpy.linspace(10e-6,310e-6, 101), numpy.linspace(320e-6, 0.0015, 21))) f = open(outfile, "w") if source == "GSM": source_gsm = True else: source_gsm = False if aperture == "GAUSSIAN": gaussian_slit = True else: gaussian_slit = False for slit in slits: tally_v = main_v(energy_in_keV=energy_in_keV, source_gsm=source_gsm, slit=slit, gaussian_slit=gaussian_slit) tally_h = main_h(energy_in_keV=energy_in_keV, source_gsm=source_gsm, slit=slit, gaussian_slit=gaussian_slit) modes_v, occ_v = tally_v.get_occupation() modes_h, occ_h = tally_h.get_occupation() print("slit, CF H, V: ", 1e6*slit, occ_h[0], occ_v[0]) f.write("%g %g %g\n" % (slit, occ_h[0], occ_v[0])) f.close() print("File written to disk: %s" % outfile) # # plots # if do_plots: # for source in sources: # a = numpy.loadtxt("%s/coherent_fraction_vs_slit_source_%s_aperture_GAUSSIAN.dat" % (subdirectory, source)) # b = numpy.loadtxt("%s/coherent_fraction_vs_slit_source_%s_aperture_RECTANGULAR.dat" % (subdirectory, source)) # # if source == "GSM": # title = "GSM source" # else: # title = "UNDULATOR source" # # g = plot( # 1e6 * a[:,0], a[:,1], # 1e6 * a[:,0], a[:,2], # 1e6 * b[:, 0], b[:, 1], # 1e6 * b[:, 0], b[:, 2], # legend=['Horizontal Gaussian', 'Vertical Gaussian', 'Horizontal Rectangular', 'Vertical Rectangular'], # xtitle="Slit aperture [um]", ytitle="Coherent Fraction", title=title, # color = ['green','blue', 'green', 'blue'], # 
linestyle=['--','--',None,None], # xlog=True, yrange=[0,1.01], show=False) # # g[1].grid() # plt.yticks(numpy.arange(0, 1.1, step=0.1)) # # plot_show() # # for aperture in apertures: # b1 = numpy.loadtxt("%s/coherent_fraction_vs_slit_source_GSM_aperture_%s.dat" % (subdirectory, aperture)) # b2 = numpy.loadtxt("%s/coherent_fraction_vs_slit_source_UND_aperture_%s.dat" % (subdirectory, aperture)) # # g = plot( # 1e6 * b1[:, 0], b1[:, 1], # 1e6 * b1[:, 0], b1[:, 2], # 1e6 * b2[:, 0], b2[:, 1], # 1e6 * b2[:, 0], b2[:, 2], # legend=['Horizontal GSM', 'Vertical GSM', 'Horizontal UND', 'Vertical UND'], # xtitle="Slit aperture [um]", ytitle="Coherent Fraction", title="aperture is %s" % aperture, # color=['green', 'blue', 'green', 'blue'], # linestyle=['--', '--', None, None], # xlog=True, yrange=[0, 1.01], show=False) # # g[1].grid() # plt.yticks(numpy.arange(0, 1.1, step=0.1)) # # plot_show() # # # paper # aperture = apertures[0] source = sources[0] # b7_1 = numpy.loadtxt("%s/coherent_fraction_vs_slit_source_GSM_aperture_%s.dat" % ("DataCF7", aperture)) b7_2 = numpy.loadtxt("%s/coherent_fraction_vs_slit_source_UND_aperture_%s.dat" % ("DataCF7", aperture)) # b15_1 = numpy.loadtxt("%s/coherent_fraction_vs_slit_source_GSM_aperture_%s.dat" % ("DataCF15", aperture)) b15_2 = numpy.loadtxt("%s/coherent_fraction_vs_slit_source_UND_aperture_%s.dat" % ("DataCF15", aperture)) # b30_1 = numpy.loadtxt("%s/coherent_fraction_vs_slit_source_GSM_aperture_%s.dat" % ("DataCF30", aperture)) b30_2 = numpy.loadtxt("%s/coherent_fraction_vs_slit_source_UND_aperture_%s.dat" % ("DataCF30", aperture)) # # smooth # # from scipy.interpolate import make_interp_spline, BSpline # T = 1e6 * b2[:, 0] # power = b2[:, 1] # xnew = numpy.linspace(T.min(), T.max(), T.size) # spl = make_interp_spline(T, power, k=1) # type: BSpline # power_smooth = spl(xnew) from scipy.signal import savgol_filter w7 = savgol_filter(b7_2[:, 1], 5, 2) w15 = savgol_filter(b15_2[:, 1], 5, 2) w30 = savgol_filter(b30_2[:, 1], 5, 2) g = 
plot( 1e6 * b7_2[:, 0], w7, #b2[:, 1], 1e6 * b7_2[:, 0], b7_2[:, 2], 1e6 * b15_2[:, 0], w15, # b2[:, 1], 1e6 * b15_2[:, 0], b15_2[:, 2], 1e6 * b30_2[:, 0], w30, # b2[:, 1], 1e6 * b30_2[:, 0], b30_2[:, 2], legend=['7 keV Horizontal', '7 keV Vertical', '15 keV Horizontal', '15 keV Vertical', '30 keV Horizontal', '30 keV Vertical', ], xtitle="Slit aperture [um]", ytitle="Coherent Fraction", color=['green', 'green', 'blue', 'blue', 'red', 'red'], linestyle=[None, '--', None, '--', None, '--',], xlog=False, xrange=[0.,1000], yrange=[0, 1.01], show=False) g[1].grid() plt.yticks(numpy.arange(0, 1.1, step=0.1)) def aa(x): return x / 565 def invaa(x): return 565 * x # secax = g[1].secondary_xaxis('top', functions=(aa, invaa)) # secax.set_xlabel('n') plt.savefig("cf_vs_aperture.eps") plot_show()
36.625251
128
0.610309
2,062
18,276
5.118332
0.155674
0.061114
0.037521
0.012507
0.758196
0.729771
0.720485
0.702956
0.685143
0.673204
0
0.050717
0.275005
18,276
499
129
36.625251
0.745811
0.191399
0
0.519084
0
0
0.047939
0.015171
0
0
0
0
0
1
0.038168
false
0.007634
0.091603
0.007634
0.167939
0.007634
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
fc42fb06431948239aaca17f360cd96e878d2e07
129
py
Python
Examples/test_example.py
sntemple12/lambdata
ead7600b7cf1e04c6db39139bdf69641e07f5c7b
[ "MIT" ]
null
null
null
Examples/test_example.py
sntemple12/lambdata
ead7600b7cf1e04c6db39139bdf69641e07f5c7b
[ "MIT" ]
null
null
null
Examples/test_example.py
sntemple12/lambdata
ead7600b7cf1e04c6db39139bdf69641e07f5c7b
[ "MIT" ]
null
null
null
from random import randint import pytest from example import increment, COLORS def test_increment(): assert increment(3) ==5
21.5
37
0.782946
18
129
5.555556
0.722222
0
0
0
0
0
0
0
0
0
0
0.018349
0.155039
129
6
38
21.5
0.899083
0
0
0
0
0
0
0
0
0
0
0
0.2
1
0.2
true
0
0.6
0
0.8
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
fc660c589a3614be0e4aa04c25632432d2625dfe
24
py
Python
dragoneye/version.py
mirzajb/dragoneye
c7593640093a9178a55dd32c738bbf0b1cf49d5e
[ "MIT" ]
1
2021-11-16T19:59:05.000Z
2021-11-16T19:59:05.000Z
dragoneye/version.py
mirzajb/dragoneye
c7593640093a9178a55dd32c738bbf0b1cf49d5e
[ "MIT" ]
null
null
null
dragoneye/version.py
mirzajb/dragoneye
c7593640093a9178a55dd32c738bbf0b1cf49d5e
[ "MIT" ]
null
null
null
__version__ = 'v0.0.74'
12
23
0.666667
4
24
3
1
0
0
0
0
0
0
0
0
0
0
0.190476
0.125
24
1
24
24
0.380952
0
0
0
0
0
0.291667
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
fc702d7db07f01cf0eb790a8691841fd4fed28b8
314
py
Python
test/test_function_softbarcode.py
RubinhoSilva/python-escpos
342d1e37854df52dbbf256cdbbfb8a69a5e1939c
[ "MIT" ]
null
null
null
test/test_function_softbarcode.py
RubinhoSilva/python-escpos
342d1e37854df52dbbf256cdbbfb8a69a5e1939c
[ "MIT" ]
null
null
null
test/test_function_softbarcode.py
RubinhoSilva/python-escpos
342d1e37854df52dbbf256cdbbfb8a69a5e1939c
[ "MIT" ]
null
null
null
#!/usr/bin/python import escpos.printer as printer import pytest @pytest.fixture def instance(): return printer.Dummy() def test_soft_barcode_ean8(instance): instance.soft_barcode("ean8", "1234") def test_soft_barcode_ean8_nocenter(instance): instance.soft_barcode("ean8", "1234", center=False)
17.444444
55
0.751592
42
314
5.404762
0.5
0.193833
0.264317
0.15859
0.502203
0.30837
0
0
0
0
0
0.043796
0.127389
314
17
56
18.470588
0.784672
0.050955
0
0
0
0
0.053872
0
0
0
0
0
0
1
0.333333
false
0
0.222222
0.111111
0.666667
0.222222
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
fc74e8967af583ab4d0a352326b7474f366d8755
163
py
Python
benchmarks/__init__.py
Illviljan/flox
5a2bd5746d5be6c8c4b9d81ea0b688f88e152055
[ "Apache-2.0" ]
26
2021-11-17T12:37:44.000Z
2022-03-14T14:58:33.000Z
benchmarks/__init__.py
Illviljan/flox
5a2bd5746d5be6c8c4b9d81ea0b688f88e152055
[ "Apache-2.0" ]
31
2021-03-24T16:16:54.000Z
2021-11-11T20:54:49.000Z
benchmarks/__init__.py
Illviljan/flox
5a2bd5746d5be6c8c4b9d81ea0b688f88e152055
[ "Apache-2.0" ]
3
2021-05-19T18:46:42.000Z
2021-09-11T06:09:09.000Z
def parameterized(names, params): def decorator(func): func.param_names = names func.params = params return func return decorator
20.375
33
0.638037
18
163
5.722222
0.444444
0
0
0
0
0
0
0
0
0
0
0
0.294479
163
7
34
23.285714
0.895652
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
fc7635fe001dc5e269e8c9fa8fc26caee9bf3e05
162
py
Python
hy/__main__.py
lafrenierejm/hy
abd5ef6a8a3c42c0f097e5ae424296b177d1221f
[ "MIT" ]
3,839
2015-01-01T23:53:13.000Z
2022-03-28T07:41:36.000Z
hy/__main__.py
lafrenierejm/hy
abd5ef6a8a3c42c0f097e5ae424296b177d1221f
[ "MIT" ]
1,430
2015-01-01T18:43:36.000Z
2022-03-31T20:51:21.000Z
hy/__main__.py
lafrenierejm/hy
abd5ef6a8a3c42c0f097e5ae424296b177d1221f
[ "MIT" ]
361
2015-01-02T17:16:11.000Z
2022-03-20T14:40:14.000Z
import sys from hy.cmdline import hy_main # Running hy as a module (e.g. `python -m hy`) # is equivalent to running the main `hy` command. sys.exit(hy_main())
18
49
0.709877
30
162
3.766667
0.666667
0.106195
0
0
0
0
0
0
0
0
0
0
0.179012
162
8
50
20.25
0.849624
0.567901
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
5da8984b58a8e9d320b65e1f156aaf456b8827ce
112
py
Python
vispy/util/dataio/__init__.py
MatthieuDartiailh/vispy
09d429be361a148b0614a192f56d4070c624072c
[ "BSD-3-Clause" ]
1
2017-06-12T16:24:11.000Z
2017-06-12T16:24:11.000Z
vispy/util/dataio/__init__.py
MatthieuDartiailh/vispy
09d429be361a148b0614a192f56d4070c624072c
[ "BSD-3-Clause" ]
null
null
null
vispy/util/dataio/__init__.py
MatthieuDartiailh/vispy
09d429be361a148b0614a192f56d4070c624072c
[ "BSD-3-Clause" ]
null
null
null
from .io import (crate, read_mesh, write_mesh, imread, imsave, # noqa _check_img_lib) # noqa
37.333333
70
0.625
15
112
4.333333
0.866667
0
0
0
0
0
0
0
0
0
0
0
0.285714
112
2
71
56
0.8125
0.080357
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
5de476cdb216c36ceaeac68e3c1ef564ada34d21
82
py
Python
NoSliceException.py
renshj/High-Cadence-Processing
5d5a2df741858f6e1466d7c4b008e9245d4b780a
[ "MIT" ]
null
null
null
NoSliceException.py
renshj/High-Cadence-Processing
5d5a2df741858f6e1466d7c4b008e9245d4b780a
[ "MIT" ]
null
null
null
NoSliceException.py
renshj/High-Cadence-Processing
5d5a2df741858f6e1466d7c4b008e9245d4b780a
[ "MIT" ]
null
null
null
#This file was created by Tate Hagan class NoSliceException(Exception): pass
27.333333
37
0.768293
11
82
5.727273
1
0
0
0
0
0
0
0
0
0
0
0
0.182927
82
3
38
27.333333
0.940299
0.426829
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
4
b90c83359e9b6e6b18bddf2045d4329e62ac08a5
87
py
Python
wrlapp/apps.py
ShivaGuntuku/TL-DR-Just-Listen
fa5f7ab7ba29c8dd1d1829753c05fd0cafe15016
[ "MIT" ]
null
null
null
wrlapp/apps.py
ShivaGuntuku/TL-DR-Just-Listen
fa5f7ab7ba29c8dd1d1829753c05fd0cafe15016
[ "MIT" ]
10
2020-03-24T15:59:34.000Z
2022-03-11T23:26:29.000Z
wrlapp/apps.py
ShivaGuntuku/TL-DR-Just-Listen
fa5f7ab7ba29c8dd1d1829753c05fd0cafe15016
[ "MIT" ]
null
null
null
from django.apps import AppConfig class WrlappConfig(AppConfig): name = 'wrlapp'
14.5
33
0.747126
10
87
6.5
0.9
0
0
0
0
0
0
0
0
0
0
0
0.172414
87
5
34
17.4
0.902778
0
0
0
0
0
0.068966
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
f8d5cbcc402fe62b532359899727dcbaf07f14a0
99
py
Python
task_manager/enums.py
Serrones/task_manager
6dd7f1b19841929fa96ff41dd2506631663e087b
[ "MIT" ]
null
null
null
task_manager/enums.py
Serrones/task_manager
6dd7f1b19841929fa96ff41dd2506631663e087b
[ "MIT" ]
null
null
null
task_manager/enums.py
Serrones/task_manager
6dd7f1b19841929fa96ff41dd2506631663e087b
[ "MIT" ]
null
null
null
from enum import Enum class Status(str, Enum): done = 'done' in_progress = 'in progress'
14.142857
31
0.656566
14
99
4.571429
0.642857
0.3125
0
0
0
0
0
0
0
0
0
0
0.242424
99
6
32
16.5
0.853333
0
0
0
0
0
0.151515
0
0
0
0
0
0
1
0
false
0
0.25
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
5d58bae6d952f5f58c88fee3cccab3f9b5475975
8,601
py
Python
stmp.py
dev-acoustikue/nyscec3
4c7b4d5bfe52f382b692d56133c55d0ad6bcb437
[ "MIT" ]
2
2020-09-07T09:58:16.000Z
2020-12-15T12:46:02.000Z
stmp.py
dev-acoustikue/nyscec3
4c7b4d5bfe52f382b692d56133c55d0ad6bcb437
[ "MIT" ]
null
null
null
stmp.py
dev-acoustikue/nyscec3
4c7b4d5bfe52f382b692d56133c55d0ad6bcb437
[ "MIT" ]
1
2020-12-15T11:32:31.000Z
2020-12-15T11:32:31.000Z
# [Toy Project NYSCEC] # 0.1.2va, 20.07.31. First launched. # written by acoustikue(SukJoon Oh) # # Legal stuff: # This simple code follows MIT license. # # MIT License # Copyright (c) 2020 SukJoon Oh(acoustikue) import config as cf from email.mime.text import MIMEText from email.mime.multipart import MIMEMultipart import smtplib NYSCEC_ADMIN_ID = 'your_email' NYSCEC_ADMIN_PW = 'your_stmp_password' NYSCEC_SMTP_SERVER = 'smtp.gmail.com' NYSCEC_SMTP_PORT = 465 def generate_content_2(updates): mail_content = '' for info in updates: mail_content += ( '<p> <b>{0}</b><br> <b>New instances:</b><br>'.format(info['name']) ) if len(info['instances']) != 0: for instance in info['instances']: mail_content += ( '<i>{0}</i><br>'.format(instance) ) else: mail_content += 'No updates.<br>' mail_content += ( '<b>New posts from /jinotechboard:</b><br>') if len(info['posts']) != 0: for post in info['posts']: mail_content += ( '<i>{0}</i><br>'.format(instance) ) else: mail_content += 'No updates.<br>' #mail_content += '<br>' mail_content += '</p>' return mail_content def send_mail(subject, content, updates): #msg = MIMEText(content, _charset='utf-8') msg = MIMEMultipart('alternative') msg['Subject'] = '[NYSCEC] {0}'.format(subject) msg['From'] = 'your_email' msg['To'] = 'your_email' html_frame_upper = '<!doctype html><html> <head> <meta name="viewport" content="width=device-width" /> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8" /> <title>Simple Notice Form</title> <style> img { border: none; -ms-interpolation-mode: bicubic; max-width: 100%; } body { background-color: #f6f6f6; font-family: sans-serif; -webkit-font-smoothing: antialiased; font-size: 14px; line-height: 1.4; margin: 0; padding: 0; -ms-text-size-adjust: 100%; -webkit-text-size-adjust: 100%; } table { border-collapse: separate; mso-table-lspace: 0pt; mso-table-rspace: 0pt; width: 100%; } table td { font-family: sans-serif; font-size: 14px; vertical-align: top; } /* ------------------------------------- 
BODY & CONTAINER ------------------------------------- */ .body { background-color: #f6f6f6; width: 100%; } /* Set a max-width, and make it display as block so it will automatically stretch to that width, but will also shrink down on a phone or something */ .container { display: block; margin: 0 auto !important; /* makes it centered */ max-width: 580px; padding: 10px; width: 580px; } .content { box-sizing: border-box; display: block; margin: 0 auto; max-width: 580px; padding: 10px; } /* ------------------------------------- HEADER, FOOTER, MAIN ------------------------------------- */ .main { background: #ffffff; border-radius: 3px; width: 100%; } .wrapper { box-sizing: border-box; padding: 20px; } .content-block { padding-bottom: 10px; padding-top: 10px; } .footer { clear: both; margin-top: 10px; text-align: center; width: 100%; } .footer td, .footer p, .footer span, .footer a { color: #999999; font-size: 14px; text-align: center; } /* ------------------------------------- TYPOGRAPHY ------------------------------------- */ h1, h2, h3, h4 { color: #000000; font-family: sans-serif; font-weight: 400; line-height: 1.4; margin: 0; margin-bottom: 30px; } h1 { font-size: 35px; font-weight: 300; text-align: center; text-transform: capitalize; } p, ul, ol { font-family: sans-serif; font-size: 14px; font-weight: normal; margin: 0; margin-bottom: 15px; } p li, ul li, ol li { list-style-position: inside; margin-left: 5px; } a { color: #3498db; text-decoration: underline; } /* ------------------------------------- BUTTONS ------------------------------------- */ .btn { box-sizing: border-box; width: 100%; } .btn > tbody > tr > td { padding-bottom: 15px; } .btn table { width: auto; } .btn table td { background-color: #ffffff; border-radius: 5px; text-align: center; } .btn a { background-color: #ffffff; border: solid 1px #3498db; border-radius: 5px; box-sizing: border-box; color: #3498db; cursor: pointer; display: inline-block; font-size: 14px; font-weight: bold; margin: 0; 
padding: 12px 25px; text-decoration: none; text-transform: capitalize; } .btn-primary table td { background-color: #3498db; } .btn-primary a { background-color: #3498db; border-color: #3498db; color: #ffffff; } /* ------------------------------------- OTHER STYLES THAT MIGHT BE USEFUL ------------------------------------- */ .last { margin-bottom: 0; } .first { margin-top: 0; } .align-center { text-align: center; } .align-right { text-align: right; } .align-left { text-align: left; } .clear { clear: both; } .mt0 { margin-top: 0; } .mb0 { margin-bottom: 0; } .preheader { color: transparent; display: none; height: 0; max-height: 0; max-width: 0; opacity: 0; overflow: hidden; mso-hide: all; visibility: hidden; width: 0; } .powered-by a { text-decoration: none; font-size: 14px; } hr { border: 0; border-bottom: 1px solid #f6f6f6; margin: 20px 0; } /* ------------------------------------- RESPONSIVE AND MOBILE FRIENDLY STYLES ------------------------------------- */ @media only screen and (max-width: 620px) { table[class=body] h1 { font-size: 28px !important; margin-bottom: 10px !important; } table[class=body] p, table[class=body] ul, table[class=body] ol, table[class=body] td, table[class=body] span, table[class=body] a { font-size: 16px !important; } table[class=body] .wrapper, table[class=body] .article { padding: 10px !important; } table[class=body] .content { padding: 0 !important; } table[class=body] .container { padding: 0 !important; width: 100% !important; } table[class=body] .main { border-left-width: 0 !important; border-radius: 0 !important; border-right-width: 0 !important; } table[class=body] .btn table { width: 100% !important; } table[class=body] .btn a { width: 100% !important; } table[class=body] .img-responsive { height: auto !important; max-width: 100% !important; width: auto !important; } } /* ------------------------------------- PRESERVE THESE STYLES IN THE HEAD ------------------------------------- */ @media all { .ExternalClass { width: 100%; } 
.ExternalClass, .ExternalClass p, .ExternalClass span, .ExternalClass font, .ExternalClass td, .ExternalClass div { line-height: 100%; } .apple-link a { color: inherit !important; font-family: inherit !important; font-size: inherit !important; font-weight: inherit !important; line-height: inherit !important; text-decoration: none !important; } #MessageViewBody a { color: inherit; text-decoration: none; font-size: inherit; font-family: inherit; font-weight: inherit; line-height: inherit; } .btn-primary table td:hover { background-color: #34495e !important; } .btn-primary a:hover { background-color: #34495e !important; border-color: #34495e !important; } } </style> </head> <body class=""> <!-- <span class="preheader">This is preheader text. Some clients will show this text as a preview.</span> --> <table role="presentation" border="0" cellpadding="0" cellspacing="0" class="body"> <tr> <td>&nbsp;</td> <td class="container"> <div class="content"> <!-- START CENTERED WHITE CONTAINER --> <table role="presentation" class="main"> <!-- START MAIN CONTENT AREA --> <tr> <td class="wrapper"> <table role="presentation" border="0" cellpadding="0" cellspacing="0"> <tr> <td><p>Hi there, <br>there are some new updates.</p>' html_frame_lower = '<p>From NYSCEC.</p></td> </tr> </table> </td> </tr> <!-- END MAIN CONTENT AREA --> </table> <!-- END CENTERED WHITE CONTAINER --> <!-- START FOOTER --> <div class="footer"> <table role="presentation" border="0" cellpadding="0" cellspacing="0"> <tr> <td class="content-block"> <span class="apple-link">You are receiving this letter due to the agreement of subscription.</span><br> Send your information to <u><a href="mailto:">your_mail</a></u> to unsubscribe. </td> </tr> <tr> <td class="content-block powered-by"> Provided by <u><a href="">acoustikue</a></u> (SukJoon Oh). 
</td> </tr> </table> </div> <!-- END FOOTER --> </div> </td> <td>&nbsp;</td> </tr> </table> </body></html>' msg.attach(MIMEText(content, 'plain')) msg.attach(MIMEText(html_frame_upper + generate_content_2(updates) + html_frame_lower, 'html')) smtp_server = smtplib.SMTP_SSL(NYSCEC_SMTP_SERVER, NYSCEC_SMTP_PORT) smtp_server.login(NYSCEC_ADMIN_ID, NYSCEC_ADMIN_PW) smtp_server.sendmail(msg['From'], msg["To"].split(","), msg.as_string()) del html_frame_upper del html_frame_lower # # script if __name__ == "__main__": pass
102.392857
5,940
0.635159
1,127
8,601
4.791482
0.269743
0.026667
0.038889
0.034074
0.168148
0.090926
0.066667
0.055185
0.055185
0.045556
0
0.033166
0.144634
8,601
83
5,941
103.626506
0.700829
0.031392
0
0.102564
0
0.076923
0.829145
0.100277
0
0
0
0
0
1
0.051282
false
0.051282
0.128205
0
0.205128
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
4
5d64d082cf2a573d7f77a4851292036897eed3b0
218
py
Python
combiner/ModelA.py
RafalSkolasinski/seldon-experiments
0deee3e9cf22526da1d4a44c8d5613173577ac16
[ "MIT" ]
1
2020-04-02T14:02:06.000Z
2020-04-02T14:02:06.000Z
combiner/ModelA.py
RafalSkolasinski/seldon-experiments
0deee3e9cf22526da1d4a44c8d5613173577ac16
[ "MIT" ]
null
null
null
combiner/ModelA.py
RafalSkolasinski/seldon-experiments
0deee3e9cf22526da1d4a44c8d5613173577ac16
[ "MIT" ]
null
null
null
import logging class ModelA(): def predict(self, X, feature_names=[]): logging.warning(X) logging.warning(feature_names) return [0] def tags(self): return {"uri": "model-a"}
16.769231
43
0.582569
26
218
4.807692
0.653846
0.192
0
0
0
0
0
0
0
0
0
0.006369
0.279817
218
12
44
18.166667
0.789809
0
0
0
0
0
0.045872
0
0
0
0
0
0
1
0.25
false
0
0.125
0.125
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
5d6d541da51a5c7ebb57534c891fb2a6e836bec0
1,755
py
Python
setup.py
eoq/vsphere-automation-sdk-python
4fb7d4878fe22476564af835e5eb099f1d4b38b4
[ "MIT" ]
null
null
null
setup.py
eoq/vsphere-automation-sdk-python
4fb7d4878fe22476564af835e5eb099f1d4b38b4
[ "MIT" ]
null
null
null
setup.py
eoq/vsphere-automation-sdk-python
4fb7d4878fe22476564af835e5eb099f1d4b38b4
[ "MIT" ]
null
null
null
#!/usr/bin/env python import os from setuptools import setup setup(name='vSphere Automation SDK', version='1.69.0', description='VMware vSphere Automation SDK for Python', url='https://github.com/vmware/vsphere-automation-sdk-python', author='VMware, Inc.', license='MIT', install_requires=[ 'lxml >= 4.3.0', 'pyVmomi >= 6.7', 'vapi-runtime @ file://localhost/{}/lib/vapi-runtime/vapi_runtime-2.25.0-py2.py3-none-any.whl'.format(os.getcwd()), 'vapi-client-bindings @ file://localhost/{}/lib/vapi-client-bindings/vapi_client_bindings-3.6.0-py2.py3-none-any.whl'.format(os.getcwd()), 'vapi-common-client @ file://localhost/{}/lib/vapi-common-client/vapi_common_client-2.25.0-py2.py3-none-any.whl'.format(os.getcwd()), 'vmc-client-bindings @ file://localhost/{}/lib/vmc-client-bindings/vmc_client_bindings-1.52.0-py2.py3-none-any.whl'.format(os.getcwd()), 'nsx-python-sdk @ file://localhost/{}/lib/nsx-python-sdk/nsx_python_sdk-3.1.2.1.1-py2.py3-none-any.whl'.format(os.getcwd()), 'nsx-policy-python-sdk @ file://localhost/{}/lib/nsx-policy-python-sdk/nsx_policy_python_sdk-3.1.2.1.1-py2.py3-none-any.whl'.format(os.getcwd()), 'nsx-vmc-policy-python-sdk @ file://localhost/{}/lib/nsx-vmc-policy-python-sdk/nsx_vmc_policy_python_sdk-3.1.2.1.1-py2.py3-none-any.whl'.format(os.getcwd()), 'nsx-vmc-aws-integration-python-sdk @ file://localhost/{}/lib/nsx-vmc-aws-integration-python-sdk/nsx_vmc_aws_integration_python_sdk-3.1.2.1.1-py2.py3-none-any.whl'.format(os.getcwd()), 'vmc-draas-client-bindings @ file://localhost/{}/lib/vmc-draas-client-bindings/vmc_draas_client_bindings-1.18.0-py2.py3-none-any.whl'.format(os.getcwd()), ] )
65
192
0.688889
282
1,755
4.198582
0.216312
0.091216
0.121622
0.098818
0.578547
0.535473
0.407939
0.321791
0.321791
0.261824
0
0.042092
0.106553
1,755
26
193
67.5
0.71301
0.011396
0
0
0
0.409091
0.717416
0.558824
0
0
0
0
0
1
0
true
0
0.090909
0
0.090909
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
1
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
5d74f56fef68a84707573521521baab8c61800fa
950
py
Python
criterion/common/reduction/default.py
zhangzhengde0225/SwinTrack
526be17f8ef266cb924c6939bd8dda23e9b73249
[ "MIT" ]
143
2021-12-03T02:33:36.000Z
2022-03-29T00:01:48.000Z
criterion/common/reduction/default.py
zhangzhengde0225/SwinTrack
526be17f8ef266cb924c6939bd8dda23e9b73249
[ "MIT" ]
33
2021-12-03T10:32:05.000Z
2022-03-31T02:13:55.000Z
criterion/common/reduction/default.py
zhangzhengde0225/SwinTrack
526be17f8ef266cb924c6939bd8dda23e9b73249
[ "MIT" ]
24
2021-12-04T06:46:42.000Z
2022-03-30T07:57:47.000Z
def loss_mean_reduction_function(loss, *_): return loss.mean() def loss_sum_reduction_function(loss, *_): return loss.sum() def loss_reduce_by_weight(loss, pred, label, context): return (loss * context['sample_weight']).sum() def build_loss_reduction_function(loss_parameters: dict): if 'reduce' not in loss_parameters: loss_reduction_function = loss_mean_reduction_function else: loss_reduction_function_parameters = loss_parameters['reduce'] if loss_reduction_function_parameters == 'mean': loss_reduction_function = loss_mean_reduction_function elif loss_reduction_function_parameters == 'sum': loss_reduction_function = loss_sum_reduction_function elif loss_reduction_function_parameters == 'weighted': loss_reduction_function = loss_reduce_by_weight else: loss_reduction_function = None return loss_reduction_function
33.928571
70
0.732632
110
950
5.836364
0.218182
0.423676
0.359813
0.194704
0.375389
0.278816
0.278816
0
0
0
0
0
0.197895
950
27
71
35.185185
0.84252
0
0
0.2
0
0
0.042105
0
0
0
0
0
0
1
0.2
false
0
0
0.15
0.4
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
0
0
0
4
53766907eea0b3baeba9a03ecf21b62b2ee273e7
93
py
Python
tadataka/optimization/initializers.py
IshitaTakeshi/Tadataka
852c7afb904503005e51884408e1492ef0be836f
[ "Apache-2.0" ]
54
2019-11-15T16:30:34.000Z
2022-01-13T15:18:54.000Z
tadataka/optimization/initializers.py
IshitaTakeshi/Tadataka
852c7afb904503005e51884408e1492ef0be836f
[ "Apache-2.0" ]
11
2019-02-28T08:28:24.000Z
2020-04-07T04:47:12.000Z
tadataka/optimization/initializers.py
IshitaTakeshi/Tadataka
852c7afb904503005e51884408e1492ef0be836f
[ "Apache-2.0" ]
1
2020-02-26T13:59:40.000Z
2020-02-26T13:59:40.000Z
class BaseInitializer(object): def initialize(self): raise NotImplementedError()
23.25
35
0.72043
8
93
8.375
1
0
0
0
0
0
0
0
0
0
0
0
0.193548
93
3
36
31
0.893333
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
5383861636c23c0c9da56d35fd3df695acd77cf3
1,244
py
Python
eclcli/rca/rcaclient/v2/users.py
hanasuke/eclcli
a72191799986a02596d0d467253fd9f5ee03c5c8
[ "Apache-2.0" ]
32
2016-08-31T04:12:40.000Z
2020-12-11T04:49:57.000Z
eclcli/rca/rcaclient/v2/users.py
hanasuke/eclcli
a72191799986a02596d0d467253fd9f5ee03c5c8
[ "Apache-2.0" ]
27
2016-09-06T07:50:36.000Z
2021-09-14T09:46:03.000Z
eclcli/rca/rcaclient/v2/users.py
hanasuke/eclcli
a72191799986a02596d0d467253fd9f5ee03c5c8
[ "Apache-2.0" ]
24
2016-09-02T01:09:09.000Z
2021-01-19T09:14:16.000Z
from .. import base def getname(obj): try: return obj.name except AttributeError: return obj class User(base.Resource): HUMAN_ID = True def __repr__(self): return '<User: %s>' % getattr(self, 'name', 'unknown-name') def delete(self): self.manager.delete(self) def update(self, name=None): self.manager.update(self, name=name) class UserManager(base.BootingManagerWithFind): resource_class = User def get(self, user): return self._get("/users/%s" % getname(user), "user") def list(self): return self._list("/users", "users") def create(self, name="", password=None, **kwargs): body = { "user": {} } if name: body["user"]["name"] = name if password: body["user"]["password"] = password return self._create("/users", body, "user") def update(self, user, password=None): body = { "user": { } } if password: body['user']['password'] = password return self._update("/users/%s" % getname(user), body, "") def delete(self, user): return self._delete("/users/%s" % getname(user))
21.824561
67
0.542605
137
1,244
4.846715
0.262774
0.072289
0.058735
0.076807
0.13253
0.13253
0.13253
0.13253
0
0
0
0
0.305466
1,244
56
68
22.214286
0.768519
0
0
0.102564
0
0
0.094855
0
0
0
0
0
0
1
0.230769
false
0.153846
0.025641
0.102564
0.564103
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
1
1
0
0
4
538efab80a6406736ff7d8dd355f0730448ccc5f
60
py
Python
backtester/orderPlacer/__init__.py
mhawry/auquantoolbox
f88b052d04baa81575b6886f829bca71d9425196
[ "Apache-2.0" ]
91
2020-07-31T13:57:49.000Z
2022-01-21T14:16:47.000Z
backtester/orderPlacer/__init__.py
mhawry/auquantoolbox
f88b052d04baa81575b6886f829bca71d9425196
[ "Apache-2.0" ]
3
2021-02-26T14:43:10.000Z
2022-01-12T14:58:01.000Z
backtester/orderPlacer/__init__.py
mhawry/auquantoolbox
f88b052d04baa81575b6886f829bca71d9425196
[ "Apache-2.0" ]
30
2020-07-30T13:48:00.000Z
2022-03-09T14:20:36.000Z
__all__ = ["backtesting_order_placer", "base_order_placer"]
30
59
0.8
7
60
5.714286
0.714286
0.55
0
0
0
0
0
0
0
0
0
0
0.066667
60
1
60
60
0.714286
0
0
0
0
0
0.683333
0.4
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
539a384c7b51380ba4aeb24fe55a00f901818dda
90
py
Python
dailyfresh/df_goods/apps.py
luoyefeiwu/learn_python
e888537c538309d2600a302c0c6e92456dd785c0
[ "Apache-2.0" ]
null
null
null
dailyfresh/df_goods/apps.py
luoyefeiwu/learn_python
e888537c538309d2600a302c0c6e92456dd785c0
[ "Apache-2.0" ]
null
null
null
dailyfresh/df_goods/apps.py
luoyefeiwu/learn_python
e888537c538309d2600a302c0c6e92456dd785c0
[ "Apache-2.0" ]
null
null
null
from django.apps import AppConfig class DfGoodsConfig(AppConfig): name = 'df_goods'
15
33
0.755556
11
90
6.090909
0.909091
0
0
0
0
0
0
0
0
0
0
0
0.166667
90
5
34
18
0.893333
0
0
0
0
0
0.088889
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
53b03b39397a16ec9fb38144dece87cb452712dd
93
py
Python
backlinks/apps.py
Animatr/pls
20bf4ad2f1f9969cd5d5b33e7d8b4de6eca6bc0b
[ "bzip2-1.0.6" ]
1
2019-11-17T04:49:48.000Z
2019-11-17T04:49:48.000Z
backlinks/apps.py
Animatr/pls
20bf4ad2f1f9969cd5d5b33e7d8b4de6eca6bc0b
[ "bzip2-1.0.6" ]
10
2019-12-04T22:56:31.000Z
2022-02-10T11:29:56.000Z
backlinks/apps.py
Animatr/Backlink-Takip
20bf4ad2f1f9969cd5d5b33e7d8b4de6eca6bc0b
[ "bzip2-1.0.6" ]
null
null
null
from django.apps import AppConfig class BacklinksConfig(AppConfig): name = 'backlinks'
15.5
33
0.763441
10
93
7.1
0.9
0
0
0
0
0
0
0
0
0
0
0
0.16129
93
5
34
18.6
0.910256
0
0
0
0
0
0.096774
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
53dce1b8e6b7c228865a91853060385c92beb7c2
52
py
Python
packages/vaex-hdf5/vaex/hdf5/_version.py
skywalk163/vaex
f5e38f1fa448ef78936e2cb6b3d026fda1f2a1c5
[ "MIT" ]
null
null
null
packages/vaex-hdf5/vaex/hdf5/_version.py
skywalk163/vaex
f5e38f1fa448ef78936e2cb6b3d026fda1f2a1c5
[ "MIT" ]
null
null
null
packages/vaex-hdf5/vaex/hdf5/_version.py
skywalk163/vaex
f5e38f1fa448ef78936e2cb6b3d026fda1f2a1c5
[ "MIT" ]
null
null
null
__version_tuple__ = (0, 5, 5) __version__ = '0.5.5'
17.333333
29
0.653846
9
52
2.777778
0.444444
0.16
0.24
0
0
0
0
0
0
0
0
0.136364
0.153846
52
2
30
26
0.431818
0
0
0
0
0
0.096154
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
53e3c552e20ccfcc1b2f7dda2d759e6f4c7b32d2
132
py
Python
app/models/__init__.py
cabralwilliams/python-social
8ec0e3965312c280f31cf74a8b1e2e77d0629324
[ "MIT" ]
null
null
null
app/models/__init__.py
cabralwilliams/python-social
8ec0e3965312c280f31cf74a8b1e2e77d0629324
[ "MIT" ]
null
null
null
app/models/__init__.py
cabralwilliams/python-social
8ec0e3965312c280f31cf74a8b1e2e77d0629324
[ "MIT" ]
null
null
null
from .User import User from .Post import Post from .Comment import Comment from .Upvote import Upvote from .Downvote import Downvote
26.4
30
0.818182
20
132
5.4
0.35
0
0
0
0
0
0
0
0
0
0
0
0.143939
132
5
30
26.4
0.955752
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
53e58c7840978058dffb112d0968b444a9f38217
116
py
Python
itsybitsy/__main__.py
jessegonzalez-life360/itsybitsy
b7a8ffeff1f883a3b54a900274bd33247dcb98f8
[ "Apache-2.0" ]
1
2021-01-22T23:50:58.000Z
2021-01-22T23:50:58.000Z
itsybitsy/__main__.py
etherops/itsybitsy
ce133cce7bd6fe8fc1c9944e644624583d8fb705
[ "Apache-2.0" ]
null
null
null
itsybitsy/__main__.py
etherops/itsybitsy
ce133cce7bd6fe8fc1c9944e644624583d8fb705
[ "Apache-2.0" ]
3
2021-01-04T22:22:12.000Z
2021-06-18T14:11:28.000Z
# Copyright # Copyright 2020 Life360, Inc # SPDX-License-Identifier: Apache-2.0 from .itsybitsy import main main()
19.333333
41
0.758621
16
116
5.5
0.875
0
0
0
0
0
0
0
0
0
0
0.09
0.137931
116
5
42
23.2
0.79
0.637931
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
53fa71737929aaa7a166257a3e132102c35a455f
380
py
Python
gigalixir/auth.py
oleks/gigalixir-cli
d1b1c303e24be548ddc895165e34652c378f4347
[ "MIT" ]
null
null
null
gigalixir/auth.py
oleks/gigalixir-cli
d1b1c303e24be548ddc895165e34652c378f4347
[ "MIT" ]
null
null
null
gigalixir/auth.py
oleks/gigalixir-cli
d1b1c303e24be548ddc895165e34652c378f4347
[ "MIT" ]
null
null
null
class AuthException(Exception): def __init__(self): # Call the base class constructor with the parameters it needs message = "Sorry, you do not have access to that app. Try passing the app name explicitly with the `-a` flag. If that doesn't work, try running `gigalixir login` or check your ~/.netrc file." super(AuthException, self).__init__(message)
63.333333
199
0.713158
55
380
4.781818
0.781818
0.053232
0
0
0
0
0
0
0
0
0
0
0.207895
380
5
200
76
0.873754
0.157895
0
0
0
0.25
0.562893
0
0
0
0
0
0
1
0.25
false
0.25
0
0
0.5
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
1
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
4
54d147471337a3befcd8c5823cb82d586449bb59
2,204
py
Python
preprocessing/data_preprocessor.py
wilfred-wulbou/ml_ids
e033c0cb59cb14dbefce93847602c75a69384d65
[ "MIT" ]
8
2021-04-05T07:42:29.000Z
2021-12-10T18:13:22.000Z
preprocessing/data_preprocessor.py
wilfred-wulbou/ml_ids
e033c0cb59cb14dbefce93847602c75a69384d65
[ "MIT" ]
null
null
null
preprocessing/data_preprocessor.py
wilfred-wulbou/ml_ids
e033c0cb59cb14dbefce93847602c75a69384d65
[ "MIT" ]
1
2021-07-12T00:01:29.000Z
2021-07-12T00:01:29.000Z
import numpy as np from sklearn.preprocessing import LabelEncoder from sklearn.base import BaseEstimator, TransformerMixin # from sklearn.externals import joblib import joblib # Data Preprocessing class AttributesRemover(BaseEstimator, TransformerMixin): def __init__(self, columns=['Flow ID','Src IP','Src Port','Dst IP','Protocol','Timestamp','Label']): self.columns=columns def fit(self, X, y=None): return self # nothing else to do def transform(self, X, y=None): return X.drop(columns=self.columns, axis=1) # CustomDataCleaner removes NaN,-Infinity, & +Infinity values from columns 'Flow Pkts/s' & 'Flow Byts/s' # and also fixes the datatype of both columns. class CustomDataCleaner(TransformerMixin): def __init__(self, *args, **kwargs): pass def fit(self, X, y=None): return self def transform(self, X, y=None): X = X.astype({'Flow Pkts/s':np.float64,'Flow Byts/s':np.float64}) # Remove nan and inf values in df return X[~X.isin([np.nan, np.inf, -np.inf]).any(1)] # source: https://stackoverflow.com/questions/46162855/fit-transform-takes-2-positional-arguments-but- # 3-were-given-with-labelbinarize # Fixes bug in LabelBinarizer.. class MyLabelEncoder(TransformerMixin): def __init__(self, *args, **kwargs): self.encoder = LabelEncoder(*args, **kwargs) def fit(self, x, y=0): self.encoder.fit(x) return self def transform(self, x, y=0): return self.encoder.transform(x) class AnomalyLabelEncoder(TransformerMixin): def __init__(self, *args, **kwargs): pass def fit(self, X, y=None): return self def transform(self, X, y=None): return ((X * 0) - 1) class BenignLabelEncoder(TransformerMixin): def __init__(self, *args, **kwargs): pass def fit(self, X, y=None): return self def transform(self, X, y=None): return ((X * 0) + 1) # IDSPipeline loads the saved pipeline from file. class IDSPipelineLoader(object): def __init__(self, pipeline_filename): self.ids_pipeline = joblib.load(pipeline_filename) def getPipeline(self): return self.ids_pipeline
35.548387
104
0.669691
290
2,204
4.993103
0.351724
0.03453
0.041436
0.055249
0.287983
0.279696
0.254144
0.234807
0.196823
0.196823
0
0.012593
0.20735
2,204
62
105
35.548387
0.816256
0.212341
0
0.434783
0
0
0.041136
0
0
0
0
0
0
1
0.369565
false
0.065217
0.086957
0.195652
0.826087
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
1
1
0
0
4
54d1d7535092b7a7616ec819ca9c7cc2b6e1c7ac
38
py
Python
example.py
d222nguy/pygcn_new
dafe7d7a2d1b68a641d05428aee1f8a58250a7e6
[ "MIT" ]
null
null
null
example.py
d222nguy/pygcn_new
dafe7d7a2d1b68a641d05428aee1f8a58250a7e6
[ "MIT" ]
null
null
null
example.py
d222nguy/pygcn_new
dafe7d7a2d1b68a641d05428aee1f8a58250a7e6
[ "MIT" ]
null
null
null
i am creating the new example.py file
19
37
0.789474
8
38
3.75
1
0
0
0
0
0
0
0
0
0
0
0
0.184211
38
1
38
38
0.967742
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
4
54f2be64130b1d10a81fa4100cf08da0fb970011
25,210
py
Python
t_speech_assisstant/venv/lib/python3.7/site-packages/AddressBook/_metadata.py
TEgo17/SpeechAssistant
0e16dd4757f1b05f08ecbb4f4956b2ccd1a63c10
[ "MIT" ]
1
2020-09-01T16:24:50.000Z
2020-09-01T16:24:50.000Z
t_speech_assisstant/venv/lib/python3.7/site-packages/AddressBook/_metadata.py
TEgo17/SpeechAssistant
0e16dd4757f1b05f08ecbb4f4956b2ccd1a63c10
[ "MIT" ]
1
2021-01-28T20:40:19.000Z
2021-01-28T20:40:19.000Z
t_speech_assisstant/venv/lib/python3.7/site-packages/AddressBook/_metadata.py
TEgo17/SpeechAssistant
0e16dd4757f1b05f08ecbb4f4956b2ccd1a63c10
[ "MIT" ]
null
null
null
# This file is generated by objective.metadata # # Last update: Mon Jul 18 11:30:07 2016 import objc, sys if sys.maxsize > 2 ** 32: def sel32or64(a, b): return b else: def sel32or64(a, b): return a if sys.byteorder == "little": def littleOrBig(a, b): return a else: def littleOrBig(a, b): return b misc = {} misc.update( { "ABRecordRef": objc.createStructType( "ABRecordRef", b"{__ABBookflags=b1b1b1b1b1b1b1b1b1b1b1b21}", [ "hasUnsavedChanges", "readOnly", "importMe", "needConversion", "cleanedUp", "importTips", "restoreFromMetaData", "prefsNeedSync", "waitingForReset", "enforcesConstraints", "tracksAllSources", "_reserved", ], ), "ABAddressBookRef": objc.createStructType( "ABAddressBookRef", b"{__ABAddressBookRef=}", [] ), "ABMutableMultiValueRef": objc.createStructType( "ABMutableMultiValueRef", b"{__ABMultiValue=}", [] ), "ABActionEnabledCallback": objc.createStructType( "ABActionEnabledCallback", b"{__ABMultiValue=}", [] ), "ABPickerAttributes": objc.createStructType( "ABPickerAttributes", b"{OpaqueABPicker=}", [] ), "ABGroupRef": objc.createStructType("ABGroupRef", b"{__ABGroup=}", []), "ABSearchElementRef": objc.createStructType( "ABSearchElementRef", b"{__ABSearchElementRef=}", [] ), "ABPersonRef": objc.createStructType("ABPersonRef", b"{__ABPerson=}", []), "ABMultiValueRef": objc.createStructType( "ABMultiValueRef", b"{__ABMultiValue=}", [] ), "ABPeoplePickerSelectionBehavior": objc.createStructType( "ABPeoplePickerSelectionBehavior", b"{__ABBookflags=b1b1b1b1b1b1b1b1b1b1b1b21}", [ "hasUnsavedChanges", "readOnly", "importMe", "needConversion", "cleanedUp", "importTips", "restoreFromMetaData", "prefsNeedSync", "waitingForReset", "enforcesConstraints", "tracksAllSources", "_reserved", ], ), "ABPickerRef": objc.createStructType("ABPickerRef", b"{OpaqueABPicker=}", []), } ) constants = 
"""$ABAddressBookErrorDomain$ABMultiValueIdentifiersErrorKey$ABPeoplePickerDisplayedPropertyDidChangeNotification$ABPeoplePickerGroupSelectionDidChangeNotification$ABPeoplePickerNameSelectionDidChangeNotification$ABPeoplePickerValueSelectionDidChangeNotification$kABAIMHomeLabel$kABAIMInstantProperty$kABAIMMobileMeLabel$kABAIMWorkLabel$kABAddressCityKey$kABAddressCountryCodeKey$kABAddressCountryKey$kABAddressHomeLabel$kABAddressProperty$kABAddressStateKey$kABAddressStreetKey$kABAddressWorkLabel$kABAddressZIPKey$kABAlternateBirthdayComponentsProperty$kABAnniversaryLabel$kABAssistantLabel$kABBirthdayComponentsProperty$kABBirthdayProperty$kABBrotherLabel$kABCalendarURIsProperty$kABChildLabel$kABCreationDateProperty$kABDatabaseChangedExternallyNotification$kABDatabaseChangedNotification$kABDeletedRecords$kABDepartmentProperty$kABEmailHomeLabel$kABEmailMobileMeLabel$kABEmailProperty$kABEmailWorkLabel$kABFatherLabel$kABFirstNamePhoneticProperty$kABFirstNameProperty$kABFriendLabel$kABGroupNameProperty$kABHomeLabel$kABHomePageLabel$kABHomePageProperty$kABICQHomeLabel$kABICQInstantProperty$kABICQWorkLabel$kABInsertedRecords$kABInstantMessageProperty$kABInstantMessageServiceAIM$kABInstantMessageServiceFacebook$kABInstantMessageServiceGaduGadu$kABInstantMessageServiceGoogleTalk$kABInstantMessageServiceICQ$kABInstantMessageServiceJabber$kABInstantMessageServiceKey$kABInstantMessageServiceMSN$kABInstantMessageServiceQQ$kABInstantMessageServiceSkype$kABInstantMessageServiceYahoo$kABInstantMessageUsernameKey$kABJabberHomeLabel$kABJabberInstantProperty$kABJabberWorkLabel$kABJobTitleProperty$kABLastNamePhoneticProperty$kABLastNameProperty$kABMSNHomeLabel$kABMSNInstantProperty$kABMSNWorkLabel$kABMaidenNameProperty$kABManagerLabel$kABMiddleNamePhoneticProperty$kABMiddleNameProperty$kABMobileMeLabel$kABModificationDateProperty$kABMotherLabel$kABNicknameProperty$kABNoteProperty$kABOrganizationPhoneticProperty$kABOrganizationProperty$kABOtherDateComponentsProperty$kABOtherDatesProperty$kA
BOtherLabel$kABParentLabel$kABPartnerLabel$kABPersonFlags$kABPhoneHomeFAXLabel$kABPhoneHomeLabel$kABPhoneMainLabel$kABPhoneMobileLabel$kABPhonePagerLabel$kABPhoneProperty$kABPhoneWorkFAXLabel$kABPhoneWorkLabel$kABPhoneiPhoneLabel$kABRelatedNamesProperty$kABSisterLabel$kABSocialProfileProperty$kABSocialProfileServiceFacebook$kABSocialProfileServiceFlickr$kABSocialProfileServiceKey$kABSocialProfileServiceLinkedIn$kABSocialProfileServiceMySpace$kABSocialProfileServiceSinaWeibo$kABSocialProfileServiceTencentWeibo$kABSocialProfileServiceTwitter$kABSocialProfileServiceYelp$kABSocialProfileURLKey$kABSocialProfileUserIdentifierKey$kABSocialProfileUsernameKey$kABSpouseLabel$kABSuffixProperty$kABTitleProperty$kABUIDProperty$kABURLsProperty$kABUpdatedRecords$kABWorkLabel$kABYahooHomeLabel$kABYahooInstantProperty$kABYahooWorkLabel$""" enums = """$ABAddRecordsError@1001$ABMultipleValueSelection@2$ABNoValueSelection@0$ABPropertyReadOnlyError@1014$ABPropertyUnsupportedBySourceError@1013$ABPropertyValueValidationError@1012$ABRemoveRecordsError@1002$ABSingleValueSelection@1$kABArrayProperty@5$kABBitsInBitFieldMatch@11$kABContainsSubString@7$kABContainsSubStringCaseInsensitive@8$kABDataProperty@7$kABDateComponentsProperty@8$kABDateProperty@4$kABDefaultNameOrdering@0$kABDictionaryProperty@6$kABDoesNotContainSubString@12$kABDoesNotContainSubStringCaseInsensitive@13$kABEqual@0$kABEqualCaseInsensitive@6$kABErrorInProperty@0$kABFirstNameFirst@32$kABGreaterThan@4$kABGreaterThanOrEqual@5$kABIntegerProperty@2$kABLastNameFirst@16$kABLessThan@2$kABLessThanOrEqual@3$kABMultiArrayProperty@261$kABMultiDataProperty@263$kABMultiDateComponentsProperty@264$kABMultiDateProperty@260$kABMultiDictionaryProperty@262$kABMultiIntegerProperty@258$kABMultiRealProperty@259$kABMultiStringProperty@257$kABMultiValueMask@256$kABNameOrderingMask@56$kABNotEqual@1$kABNotEqualCaseInsensitive@14$kABNotWithinIntervalAroundToday@19$kABNotWithinIntervalAroundTodayYearless@20$kABNotWithinIntervalFromToday@23$kABNotWithinIn
tervalFromTodayYearless@24$kABPickerAllowGroupSelection@4$kABPickerAllowMultipleSelection@8$kABPickerMultipleValueSelection@2$kABPickerSingleValueSelection@1$kABPrefixMatch@9$kABPrefixMatchCaseInsensitive@10$kABRealProperty@3$kABSearchAnd@0$kABSearchOr@1$kABShowAsCompany@1$kABShowAsMask@7$kABShowAsPerson@0$kABShowAsResource@2$kABShowAsRoom@3$kABStringProperty@1$kABSuffixMatch@15$kABSuffixMatchCaseInsensitive@16$kABWithinIntervalAroundToday@17$kABWithinIntervalAroundTodayYearless@18$kABWithinIntervalFromToday@21$kABWithinIntervalFromTodayYearless@22$kEventABPeoplePickerDisplayedPropertyChanged@4$kEventABPeoplePickerGroupDoubleClicked@5$kEventABPeoplePickerGroupSelectionChanged@1$kEventABPeoplePickerNameDoubleClicked@6$kEventABPeoplePickerNameSelectionChanged@2$kEventABPeoplePickerValueSelectionChanged@3$kEventClassABPeoplePicker@1633841264$kEventParamABPickerRef@1633841264$""" misc.update({}) functions = { "ABMultiValueReplaceValue": ( sel32or64(b"B^{__ABMultiValue=}@l", b"B^{__ABMultiValue=}@q"), ), "ABPickerSetDelegate": (b"v^{OpaqueABPicker=}^{OpaqueHIObjectRef=}",), "ABAddRecord": (b"B^{__ABAddressBookRef=}@",), "ABPickerClearSearchField": (b"v^{OpaqueABPicker=}",), "ABPickerSelectGroup": (b"v^{OpaqueABPicker=}^{__ABGroup=}B",), "ABCopyDefaultCountryCode": ( b"^{__CFString=}^{__ABAddressBookRef=}", "", {"retval": {"already_cfretained": True}}, ), "ABPickerGetAttributes": (sel32or64(b"L^{OpaqueABPicker=}", b"I^{OpaqueABPicker=}"),), "ABSave": (b"B^{__ABAddressBookRef=}",), "ABPersonCreateWithVCardRepresentation": ( b"^{__ABPerson=}^{__CFData=}", "", {"retval": {"already_cfretained": True}}, ), "ABGroupAddMember": (b"B^{__ABGroup=}^{__ABPerson=}",), "ABPersonCreateSearchElement": ( sel32or64( b"^{__ABSearchElementRef=}^{__CFString=}^{__CFString=}^{__CFString=}@l", b"^{__ABSearchElementRef=}^{__CFString=}^{__CFString=}^{__CFString=}@q", ), "", {"retval": {"already_cfretained": True}}, ), "ABMultiValueRemove": (sel32or64(b"B^{__ABMultiValue=}l", 
b"B^{__ABMultiValue=}q"),), "ABSearchElementMatchesRecord": (b"B^{__ABSearchElementRef=}@",), "ABRecordCopyRecordType": ( b"^{__CFString=}@", "", {"retval": {"already_cfretained": True}}, ), "ABPickerSelectIdentifier": (b"v^{OpaqueABPicker=}^{__ABPerson=}^{__CFString=}B",), "ABMultiValueCopyPrimaryIdentifier": ( b"^{__CFString=}^{__ABMultiValue=}", "", {"retval": {"already_cfretained": True}}, ), "ABMultiValueIndexForIdentifier": ( sel32or64( b"l^{__ABMultiValue=}^{__CFString=}", b"q^{__ABMultiValue=}^{__CFString=}" ), ), "ABPickerSelectRecord": (b"v^{OpaqueABPicker=}@B",), "ABMultiValueCreateMutableCopy": ( b"^{__ABMultiValue=}^{__ABMultiValue=}", "", {"retval": {"already_cfretained": True}}, ), "ABRecordRemoveValue": (b"B@^{__CFString=}",), "ABPickerCopySelectedGroups": ( b"^{__CFArray=}^{OpaqueABPicker=}", "", {"retval": {"already_cfretained": True}}, ), "ABMultiValueCopyIdentifierAtIndex": ( sel32or64( b"^{__CFString=}^{__ABMultiValue=}l", b"^{__CFString=}^{__ABMultiValue=}q" ), "", {"retval": {"already_cfretained": True}}, ), "ABCancelLoadingImageDataForTag": (sel32or64(b"vl", b"vq"),), "ABCopyArrayOfMatchingRecords": ( b"^{__CFArray=}^{__ABAddressBookRef=}^{__ABSearchElementRef=}", "", {"retval": {"already_cfretained": True}}, ), "ABPickerRemoveProperty": (b"v^{OpaqueABPicker=}^{__CFString=}",), "ABMultiValueCount": (sel32or64(b"l^{__ABMultiValue=}", b"q^{__ABMultiValue=}"),), "ABPickerCopySelectedIdentifiers": ( b"^{__CFArray=}^{OpaqueABPicker=}^{__ABPerson=}", "", {"retval": {"already_cfretained": True}}, ), "ABGroupCreate": (b"^{__ABGroup=}", "", {"retval": {"already_cfretained": True}}), "ABMultiValueCreateCopy": ( b"^{__ABMultiValue=}^{__ABMultiValue=}", "", {"retval": {"already_cfretained": True}}, ), "ABMultiValueAdd": ( b"B^{__ABMultiValue=}@^{__CFString=}^^{__CFString}", "", {"arguments": {3: {"type_modifier": "o"}}}, ), "ABHasUnsavedChanges": (b"B^{__ABAddressBookRef=}",), "ABMultiValueReplaceLabel": ( sel32or64( 
b"B^{__ABMultiValue=}^{__CFString=}l", b"B^{__ABMultiValue=}^{__CFString=}q" ), ), "ABGroupCopyParentGroups": ( b"^{__CFArray=}^{__ABGroup=}", "", {"retval": {"already_cfretained": True}}, ), "ABGroupSetDistributionIdentifier": ( b"B^{__ABGroup=}^{__ABPerson=}^{__CFString=}^{__CFString=}", ), "ABPickerDeselectRecord": (b"v^{OpaqueABPicker=}@",), "ABGroupCreateSearchElement": ( sel32or64( b"^{__ABSearchElementRef=}^{__CFString=}^{__CFString=}^{__CFString=}@l", b"^{__ABSearchElementRef=}^{__CFString=}^{__CFString=}^{__CFString=}@q", ), "", {"retval": {"already_cfretained": True}}, ), "ABCopyRecordTypeFromUniqueId": ( b"^{__CFString=}^{__ABAddressBookRef=}^{__CFString=}", "", {"retval": {"already_cfretained": True}}, ), "ABRemoveRecord": (b"B^{__ABAddressBookRef=}@",), "ABGroupCopyDistributionIdentifier": ( b"^{__CFString=}^{__ABGroup=}^{__ABPerson=}^{__CFString=}", "", {"retval": {"already_cfretained": True}}, ), "ABPersonCopyImageData": ( b"^{__CFData=}^{__ABPerson=}", "", {"retval": {"already_cfretained": True}}, ), "ABPickerDeselectGroup": (b"v^{OpaqueABPicker=}^{__ABGroup=}",), "ABGroupRemoveGroup": (b"B^{__ABGroup=}^{__ABGroup=}",), "ABRemoveProperties": ( sel32or64( b"l^{__ABAddressBookRef=}^{__CFString=}^{__CFArray=}", b"q^{__ABAddressBookRef=}^{__CFString=}^{__CFArray=}", ), ), "ABGroupCopyArrayOfAllSubgroups": ( b"^{__CFArray=}^{__ABGroup=}", "", {"retval": {"already_cfretained": True}}, ), "ABMultiValueCreate": ( b"^{__ABMultiValue=}", "", {"retval": {"already_cfretained": True}}, ), "ABPickerSelectInAddressBook": (b"v^{OpaqueABPicker=}",), "ABGroupCopyArrayOfAllMembers": ( b"^{__CFArray=}^{__ABGroup=}", "", {"retval": {"already_cfretained": True}}, ), "ABSearchElementCreateWithConjunction": ( sel32or64( b"^{__ABSearchElementRef=}l^{__CFArray=}", b"^{__ABSearchElementRef=}q^{__CFArray=}", ), "", {"retval": {"already_cfretained": True}}, ), "ABMultiValueCopyValueAtIndex": ( sel32or64(b"@^{__ABMultiValue=}l", b"@^{__ABMultiValue=}q"), "", {"retval": 
{"already_cfretained": True}}, ), "ABPersonSetImageData": (b"B^{__ABPerson=}^{__CFData=}",), "ABCreateFormattedAddressFromDictionary": ( b"^{__CFString=}^{__ABAddressBookRef=}^{__CFDictionary=}", "", {"retval": {"already_cfretained": True}}, ), "ABRecordSetValue": (b"B@^{__CFString=}@",), "ABPickerGetDelegate": (b"^{OpaqueHIObjectRef=}^{OpaqueABPicker=}",), "ABPersonCreate": (b"^{__ABPerson=}", "", {"retval": {"already_cfretained": True}}), "ABCopyArrayOfAllPeople": ( b"^{__CFArray=}^{__ABAddressBookRef=}", "", {"retval": {"already_cfretained": True}}, ), "ABPickerCopyProperties": ( b"^{__CFArray=}^{OpaqueABPicker=}", "", {"retval": {"already_cfretained": True}}, ), "ABMultiValueSetPrimaryIdentifier": (b"B^{__ABMultiValue=}^{__CFString=}",), "ABPickerCopyDisplayedProperty": ( b"^{__CFString=}^{OpaqueABPicker=}", "", {"retval": {"already_cfretained": True}}, ), "ABPickerCopySelectedRecords": ( b"^{__CFArray=}^{OpaqueABPicker=}", "", {"retval": {"already_cfretained": True}}, ), "ABGroupAddGroup": (b"B^{__ABGroup=}^{__ABGroup=}",), "ABLocalizedPropertyOrLabel": (b"@@",), "ABMultiValueCreateMutable": ( b"^{__ABMultiValue=}", "", {"retval": {"already_cfretained": True}}, ), "ABRecordCreateCopy": (b"@@", "", {"retval": {"already_cfretained": True}}), "ABGetMe": (b"^{__ABPerson=}^{__ABAddressBookRef=}",), "ABPickerSetFrame": ( sel32or64( b"v^{OpaqueABPicker=}^{_NSRect={_NSPoint=ff}{_NSSize=ff}}", b"v^{OpaqueABPicker=}^{CGRect={CGPoint=dd}{CGSize=dd}}", ), "", {"arguments": {1: {"type_modifier": "n"}}}, ), "ABAddPropertiesAndTypes": ( sel32or64( b"l^{__ABAddressBookRef=}^{__CFString=}^{__CFDictionary=}", b"q^{__ABAddressBookRef=}^{__CFString=}^{__CFDictionary=}", ), ), "ABCopyRecordForUniqueId": ( b"@^{__ABAddressBookRef=}^{__CFString=}", "", {"retval": {"already_cfretained": True}}, ), "ABSetMe": (b"v^{__ABAddressBookRef=}^{__ABPerson=}",), "ABRecordCopyValue": ( b"@@^{__CFString=}", "", {"retval": {"already_cfretained": True}}, ), "ABTypeOfProperty": ( sel32or64( 
b"l^{__ABAddressBookRef=}^{__CFString=}^{__CFString=}", b"q^{__ABAddressBookRef=}^{__CFString=}^{__CFString=}", ), ), "ABMultiValueInsert": ( sel32or64( b"B^{__ABMultiValue=}@^{__CFString=}l^^{__CFString}", b"B^{__ABMultiValue=}@^{__CFString=}q^^{__CFString}", ), "", {"arguments": {4: {"type_modifier": "o"}}}, ), "ABPickerAddProperty": (b"v^{OpaqueABPicker=}^{__CFString=}",), "ABMultiValueCopyLabelAtIndex": ( sel32or64( b"^{__CFString=}^{__ABMultiValue=}l", b"^{__CFString=}^{__ABMultiValue=}q" ), "", {"retval": {"already_cfretained": True}}, ), "ABPickerChangeAttributes": ( sel32or64(b"v^{OpaqueABPicker=}LL", b"v^{OpaqueABPicker=}II"), ), "ABPickerDeselectAll": (b"v^{OpaqueABPicker=}",), "ABBeginLoadingImageDataForClient": ( sel32or64(b"l^{__ABPerson=}^?^v", b"q^{__ABPerson=}^?^v"), "", { "arguments": { 1: { "callable": { "retval": {"type": b"v"}, "arguments": { 0: {"type": b"^{__CFData=}"}, 1: {"type": b"l"}, 2: {"type": b"^v"}, }, } } } }, ), "ABGetSharedAddressBook": (b"^{__ABAddressBookRef=}",), "ABRecordIsReadOnly": (b"B@",), "ABPickerIsVisible": (b"B^{OpaqueABPicker=}",), "ABRecordCopyUniqueId": ( b"^{__CFString=}@", "", {"retval": {"already_cfretained": True}}, ), "ABCopyArrayOfAllGroups": ( b"^{__CFArray=}^{__ABAddressBookRef=}", "", {"retval": {"already_cfretained": True}}, ), "ABPickerCreate": ( b"^{OpaqueABPicker=}", "", {"retval": {"already_cfretained": True}}, ), "ABGroupRemoveMember": (b"B^{__ABGroup=}^{__ABPerson=}",), "ABPickerDeselectIdentifier": (b"v^{OpaqueABPicker=}^{__ABPerson=}^{__CFString=}",), "ABPickerSetColumnTitle": (b"v^{OpaqueABPicker=}^{__CFString=}^{__CFString=}",), "ABPickerCopySelectedValues": ( b"^{__CFArray=}^{OpaqueABPicker=}", "", {"retval": {"already_cfretained": True}}, ), "ABPickerGetFrame": ( sel32or64( b"v^{OpaqueABPicker=}^{_NSRect={_NSPoint=ff}{_NSSize=ff}}", b"v^{OpaqueABPicker=}^{CGRect={CGPoint=dd}{CGSize=dd}}", ), "", {"arguments": {1: {"type_modifier": "o"}}}, ), "ABMultiValuePropertyType": ( 
sel32or64(b"l^{__ABMultiValue=}", b"q^{__ABMultiValue=}"), ), "ABPersonCopyVCardRepresentation": ( b"^{__CFData=}^{__ABPerson=}", "", {"retval": {"already_cfretained": True}}, ), "ABCopyArrayOfPropertiesForRecordType": ( b"^{__CFArray=}^{__ABAddressBookRef=}^{__CFString=}", "", {"retval": {"already_cfretained": True}}, ), "ABPickerCopyColumnTitle": ( b"^{__CFString=}^{OpaqueABPicker=}^{__CFString=}", "", {"retval": {"already_cfretained": True}}, ), "ABPickerSetVisibility": (b"v^{OpaqueABPicker=}B",), "ABPersonCopyParentGroups": ( b"^{__CFArray=}^{__ABPerson=}", "", {"retval": {"already_cfretained": True}}, ), "ABCopyLocalizedPropertyOrLabel": ( b"^{__CFString=}^{__CFString=}", "", {"retval": {"already_cfretained": True}}, ), "ABPickerEditInAddressBook": (b"v^{OpaqueABPicker=}",), "ABPickerSetDisplayedProperty": (b"v^{OpaqueABPicker=}^{__CFString=}",), } cftypes = [ ("ABAddressBookRef", b"^{__ABAddressBookRef=}", None, "ABAddressBook"), ("ABGroupRef", b"^{__ABGroup=}", None, "ABGroup"), ("ABMultiValueRef", b"^{__ABMultiValue=}", None, "ABMultiValue"), ("ABMutableMultiValueRef", b"^{__ABMultiValue=}", None, "ABMutableMultiValue"), ("ABPersonRef", b"^{__ABPerson=}", None, "ABPerson"), ("ABSearchElementRef", b"^{__ABSearchElementRef=}", None, "ABSearchElement"), ("ABPickerRef", b"^{OpaqueABPicker}", None, "ABPeoplePickerView"), ] r = objc.registerMetaDataForSelector objc._updatingMetadata(True) try: r(b"ABAddressBook", b"addRecord:", {"retval": {"type": "Z"}}) r( b"ABAddressBook", b"addRecord:error:", {"retval": {"type": "Z"}, "arguments": {3: {"type_modifier": b"o"}}}, ) r(b"ABAddressBook", b"hasUnsavedChanges", {"retval": {"type": "Z"}}) r(b"ABAddressBook", b"removeRecord:", {"retval": {"type": "Z"}}) r( b"ABAddressBook", b"removeRecord:error:", {"retval": {"type": "Z"}, "arguments": {3: {"type_modifier": b"o"}}}, ) r(b"ABAddressBook", b"save", {"retval": {"type": "Z"}}) r( b"ABAddressBook", b"saveAndReturnError:", {"retval": {"type": "Z"}, "arguments": {2: 
{"type_modifier": b"o"}}}, ) r(b"ABGroup", b"addMember:", {"retval": {"type": "Z"}}) r(b"ABGroup", b"addSubgroup:", {"retval": {"type": "Z"}}) r(b"ABGroup", b"removeMember:", {"retval": {"type": "Z"}}) r(b"ABGroup", b"removeSubgroup:", {"retval": {"type": "Z"}}) r( b"ABGroup", b"setDistributionIdentifier:forProperty:person:", {"retval": {"type": "Z"}}, ) r(b"ABMutableMultiValue", b"removeValueAndLabelAtIndex:", {"retval": {"type": "Z"}}) r( b"ABMutableMultiValue", b"replaceLabelAtIndex:withLabel:", {"retval": {"type": "Z"}}, ) r( b"ABMutableMultiValue", b"replaceValueAtIndex:withValue:", {"retval": {"type": "Z"}}, ) r(b"ABMutableMultiValue", b"setPrimaryIdentifier:", {"retval": {"type": "Z"}}) r(b"ABPeoplePickerView", b"allowsGroupSelection", {"retval": {"type": "Z"}}) r(b"ABPeoplePickerView", b"allowsMultipleSelection", {"retval": {"type": "Z"}}) r( b"ABPeoplePickerView", b"selectGroup:byExtendingSelection:", {"arguments": {3: {"type": "Z"}}}, ) r( b"ABPeoplePickerView", b"selectIdentifier:forPerson:byExtendingSelection:", {"arguments": {4: {"type": "Z"}}}, ) r( b"ABPeoplePickerView", b"selectRecord:byExtendingSelection:", {"arguments": {3: {"type": "Z"}}}, ) r( b"ABPeoplePickerView", b"setAllowsGroupSelection:", {"arguments": {2: {"type": "Z"}}}, ) r( b"ABPeoplePickerView", b"setAllowsMultipleSelection:", {"arguments": {2: {"type": "Z"}}}, ) r( b"ABPeoplePickerView", b"setGroupDoubleAction:", {"arguments": {2: {"sel_of_type": b"v@:@"}}}, ) r( b"ABPeoplePickerView", b"setNameDoubleAction:", {"arguments": {2: {"sel_of_type": b"v@:@"}}}, ) r(b"ABPerson", b"setImageData:", {"retval": {"type": "Z"}}) r(b"ABPersonView", b"editing", {"retval": {"type": "Z"}}) r(b"ABPersonView", b"setEditing:", {"arguments": {2: {"type": "Z"}}}) r(b"ABPersonView", b"setShouldShowLinkedPeople:", {"arguments": {2: {"type": b"Z"}}}) r(b"ABPersonView", b"shouldShowLinkedPeople", {"retval": {"type": b"Z"}}) r(b"ABRecord", b"isReadOnly", {"retval": {"type": "Z"}}) r(b"ABRecord", 
b"removeValueForProperty:", {"retval": {"type": "Z"}}) r(b"ABRecord", b"setValue:forProperty:", {"retval": {"type": "Z"}}) r( b"ABRecord", b"setValue:forProperty:error:", {"retval": {"type": "Z"}, "arguments": {4: {"type_modifier": b"o"}}}, ) r(b"ABSearchElement", b"matchesRecord:", {"retval": {"type": "Z"}}) r(b"NSObject", b"actionProperty", {"retval": {"type": b"@"}}) r( b"NSObject", b"consumeImageData:forTag:", { "required": True, "retval": {"type": b"v"}, "arguments": {2: {"type": b"@"}, 3: {"type": sel32or64(b"i", b"q")}}, }, ) r( b"NSObject", b"performActionForPerson:identifier:", {"retval": {"type": b"v"}, "arguments": {2: {"type": b"@"}, 3: {"type": b"@"}}}, ) r( b"NSObject", b"shouldEnableActionForPerson:identifier:", {"retval": {"type": "Z"}, "arguments": {2: {"type": b"@"}, 3: {"type": b"@"}}}, ) r( b"NSObject", b"titleForPerson:identifier:", {"retval": {"type": b"@"}, "arguments": {2: {"type": b"@"}, 3: {"type": b"@"}}}, ) finally: objc._updatingMetadata(False) protocols = { "ABActionDelegate": objc.informal_protocol( "ABActionDelegate", [ objc.selector( None, b"shouldEnableActionForPerson:identifier:", b"Z@:@@", isRequired=False, ), objc.selector(None, b"actionProperty", b"@@:", isRequired=False), objc.selector( None, b"performActionForPerson:identifier:", b"v@:@@", isRequired=False ), objc.selector( None, b"titleForPerson:identifier:", b"@@:@@", isRequired=False ), ], ) } expressions = {} # END OF FILE
42.801358
2,846
0.61551
1,701
25,210
8.858319
0.292769
0.036236
0.064109
0.075259
0.289488
0.233873
0.188943
0.112822
0.102867
0.081763
0
0.015616
0.204958
25,210
588
2,847
42.87415
0.736167
0.003729
0
0.514938
1
0.003515
0.612958
0.426546
0
0
0
0
0
1
0.00703
false
0
0.008787
0.00703
0.022847
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
54f6a58c33e8e4d7e656011f643d413b54575a16
64
py
Python
todolist/api/__init__.py
GArmane/python-fastapi-hex-todo
f54fbe0ecd6e158c1ed54a9a7a8ed6bd9d96d39d
[ "MIT" ]
60
2020-03-27T10:06:47.000Z
2022-03-23T23:05:12.000Z
todolist/api/__init__.py
GArmane/python-fastapi-hex-todo
f54fbe0ecd6e158c1ed54a9a7a8ed6bd9d96d39d
[ "MIT" ]
null
null
null
todolist/api/__init__.py
GArmane/python-fastapi-hex-todo
f54fbe0ecd6e158c1ed54a9a7a8ed6bd9d96d39d
[ "MIT" ]
11
2020-03-22T01:31:20.000Z
2021-12-20T21:03:20.000Z
__all__ = ("init_app",) from todolist.api.app import init_app
12.8
37
0.734375
10
64
4.1
0.7
0.341463
0
0
0
0
0
0
0
0
0
0
0.140625
64
4
38
16
0.745455
0
0
0
0
0
0.125
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
54fc672c51e294d26e8b211ad7befbc21f4f40a5
542
py
Python
sklearned/embeddings/allembeddings.py
microprediction/sklearned
beaf6c73d09a0bc13cf97edf617ce5d836e6a1a5
[ "MIT" ]
null
null
null
sklearned/embeddings/allembeddings.py
microprediction/sklearned
beaf6c73d09a0bc13cf97edf617ce5d836e6a1a5
[ "MIT" ]
1
2021-11-04T23:01:45.000Z
2021-11-04T23:01:45.000Z
sklearned/embeddings/allembeddings.py
microprediction/sklearned
beaf6c73d09a0bc13cf97edf617ce5d836e6a1a5
[ "MIT" ]
null
null
null
from sklearned.embeddings.kerasmodels import KERAS_EMBEDDINGS from sklearned.embeddings.keraslstm import KERAS_LSTM_MODELS from sklearned.embeddings.kerastcn import KERAS_TCN_MODELS from sklearned.embeddings.kerascnn import KERAS_CNN_MODELS from sklearned.embeddings.kerasnearby import KERAS_NEARBY EMBEDDINGS = KERAS_EMBEDDINGS + KERAS_LSTM_MODELS + KERAS_TCN_MODELS + KERAS_CNN_MODELS + KERAS_NEARBY def embedding_from_name(name): valid = [f for f in EMBEDDINGS if f.__name__ == name] return valid[0] if len(valid) == 1 else None
45.166667
102
0.830258
76
542
5.631579
0.381579
0.151869
0.268692
0.203271
0
0
0
0
0
0
0
0.004167
0.114391
542
12
103
45.166667
0.8875
0
0
0
0
0
0
0
0
0
0
0
0
1
0.111111
false
0
0.555556
0
0.777778
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
0708a34d1a8ccac92cee2eda46f4ef4a4f509dc3
13,168
py
Python
tests/async/test_download.py
wangkev/playwright-python
b62c1dbd52364c3aa4ba001bad8f94ea43ad1fc5
[ "Apache-2.0" ]
null
null
null
tests/async/test_download.py
wangkev/playwright-python
b62c1dbd52364c3aa4ba001bad8f94ea43ad1fc5
[ "Apache-2.0" ]
null
null
null
tests/async/test_download.py
wangkev/playwright-python
b62c1dbd52364c3aa4ba001bad8f94ea43ad1fc5
[ "Apache-2.0" ]
null
null
null
# Copyright (c) Microsoft Corporation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import asyncio import os from asyncio.futures import Future from pathlib import Path from typing import Optional import pytest from playwright.async_api import Browser, Error, Page def assert_file_content(path, content): with open(path, "r") as fd: assert fd.read() == content @pytest.fixture(autouse=True) def after_each_hook(server): def handle_download(request): request.setHeader("Content-Type", "application/octet-stream") request.setHeader("Content-Disposition", "attachment") request.write(b"Hello world") request.finish() def handle_download_with_file_name(request): request.setHeader("Content-Type", "application/octet-stream") request.setHeader("Content-Disposition", "attachment; filename=file.txt") request.write(b"Hello world") request.finish() server.set_route("/download", handle_download) server.set_route("/downloadWithFilename", handle_download_with_file_name) yield async def test_should_report_downloads_with_accept_downloads_false(page: Page, server): await page.set_content( f'<a href="{server.PREFIX}/downloadWithFilename">download</a>' ) async with page.expect_download() as download_info: await page.click("a") download = await download_info.value assert download.url == f"{server.PREFIX}/downloadWithFilename" assert download.suggested_filename == "file.txt" assert ( repr(download) == f"<Download url={download.url!r} suggested_filename={download.suggested_filename!r}>" ) error: 
Optional[Error] = None try: await download.path() except Error as exc: error = exc failure_reason = await download.failure() assert failure_reason assert "accept_downloads" in failure_reason assert error assert "accept_downloads: True" in error.message async def test_should_report_downloads_with_accept_downloads_true(browser, server): page = await browser.new_page(accept_downloads=True) await page.set_content(f'<a href="{server.PREFIX}/download">download</a>') async with page.expect_download() as download_info: await page.click("a") download = await download_info.value path = await download.path() assert os.path.isfile(path) assert_file_content(path, "Hello world") await page.close() async def test_should_save_to_user_specified_path(tmpdir: Path, browser, server): page = await browser.new_page(accept_downloads=True) await page.set_content(f'<a href="{server.PREFIX}/download">download</a>') async with page.expect_download() as download_info: await page.click("a") download = await download_info.value user_path = tmpdir / "download.txt" await download.save_as(user_path) assert user_path.exists() assert user_path.read_text("utf-8") == "Hello world" await page.close() async def test_should_save_to_user_specified_path_without_updating_original_path( tmpdir, browser, server ): page = await browser.new_page(accept_downloads=True) await page.set_content(f'<a href="{server.PREFIX}/download">download</a>') async with page.expect_download() as download_info: await page.click("a") download = await download_info.value user_path = tmpdir / "download.txt" await download.save_as(user_path) assert user_path.exists() assert user_path.read_text("utf-8") == "Hello world" originalPath = Path(await download.path()) assert originalPath.exists() assert originalPath.read_text("utf-8") == "Hello world" await page.close() async def test_should_save_to_two_different_paths_with_multiple_save_as_calls( tmpdir, browser, server ): page = await browser.new_page(accept_downloads=True) await 
page.set_content(f'<a href="{server.PREFIX}/download">download</a>') async with page.expect_download() as download_info: await page.click("a") download = await download_info.value user_path = tmpdir / "download.txt" await download.save_as(user_path) assert user_path.exists() assert user_path.read_text("utf-8") == "Hello world" anotheruser_path = tmpdir / "download (2).txt" await download.save_as(anotheruser_path) assert anotheruser_path.exists() assert anotheruser_path.read_text("utf-8") == "Hello world" await page.close() async def test_should_save_to_overwritten_filepath(tmpdir: Path, browser, server): page = await browser.new_page(accept_downloads=True) await page.set_content(f'<a href="{server.PREFIX}/download">download</a>') async with page.expect_download() as download_info: await page.click("a") download = await download_info.value user_path = tmpdir / "download.txt" await download.save_as(user_path) assert len(list(Path(tmpdir).glob("*.*"))) == 1 await download.save_as(user_path) assert len(list(Path(tmpdir).glob("*.*"))) == 1 assert user_path.exists() assert user_path.read_text("utf-8") == "Hello world" await page.close() async def test_should_create_subdirectories_when_saving_to_non_existent_user_specified_path( tmpdir, browser, server ): page = await browser.new_page(accept_downloads=True) await page.set_content(f'<a href="{server.PREFIX}/download">download</a>') async with page.expect_download() as download_info: await page.click("a") download = await download_info.value nested_path = tmpdir / "these" / "are" / "directories" / "download.txt" await download.save_as(nested_path) assert nested_path.exists() assert nested_path.read_text("utf-8") == "Hello world" await page.close() async def test_should_error_when_saving_with_downloads_disabled( tmpdir, browser, server ): page = await browser.new_page(accept_downloads=False) await page.set_content(f'<a href="{server.PREFIX}/download">download</a>') async with page.expect_download() as download_info: await 
page.click("a") download = await download_info.value user_path = tmpdir / "download.txt" with pytest.raises(Error) as exc: await download.save_as(user_path) assert ( "Pass { accept_downloads: True } when you are creating your browser context" in exc.value.message ) await page.close() async def test_should_error_when_saving_after_deletion(tmpdir, browser, server): page = await browser.new_page(accept_downloads=True) await page.set_content(f'<a href="{server.PREFIX}/download">download</a>') async with page.expect_download() as download_info: await page.click("a") download = await download_info.value user_path = tmpdir / "download.txt" await download.delete() with pytest.raises(Error) as exc: await download.save_as(user_path) assert "Target page, context or browser has been closed" in exc.value.message await page.close() async def test_should_report_non_navigation_downloads(browser, server): # Mac WebKit embedder does not download in this case, although Safari does. def handle_download(request): request.setHeader("Content-Type", "application/octet-stream") request.write(b"Hello world") request.finish() server.set_route("/download", handle_download) page = await browser.new_page(accept_downloads=True) await page.goto(server.EMPTY_PAGE) await page.set_content( f'<a download="file.txt" href="{server.PREFIX}/download">download</a>' ) async with page.expect_download() as download_info: await page.click("a") download = await download_info.value assert download.suggested_filename == "file.txt" path = await download.path() assert os.path.exists(path) assert_file_content(path, "Hello world") await page.close() async def test_report_download_path_within_page_on_download_handler_for_files( browser: Browser, server ): page = await browser.new_page(accept_downloads=True) on_download_path: Future[str] = asyncio.Future() async def on_download(download): on_download_path.set_result(await download.path()) page.once( "download", lambda res: asyncio.create_task(on_download(res)), ) 
await page.set_content(f'<a href="{server.PREFIX}/download">download</a>') await page.click("a") path = await on_download_path assert_file_content(path, "Hello world") await page.close() async def test_download_report_download_path_within_page_on_handle_for_blobs( browser, server ): page = await browser.new_page(accept_downloads=True) on_download_path = asyncio.Future() async def on_download(download): on_download_path.set_result(await download.path()) page.once( "download", lambda res: asyncio.create_task(on_download(res)), ) await page.goto(server.PREFIX + "/download-blob.html") await page.click("a") path = await on_download_path assert_file_content(path, "Hello world") await page.close() @pytest.mark.only_browser("chromium") async def test_should_report_alt_click_downloads(browser, server): # Firefox does not download on alt-click by default. # Our WebKit embedder does not download on alt-click, although Safari does. def handle_download(request): request.setHeader("Content-Type", "application/octet-stream") request.write(b"Hello world") request.finish() server.set_route("/download", handle_download) page = await browser.new_page(accept_downloads=True) await page.goto(server.EMPTY_PAGE) await page.set_content(f'<a href="{server.PREFIX}/download">download</a>') async with page.expect_download() as download_info: await page.click("a", modifiers=["Alt"]) download = await download_info.value path = await download.path() assert os.path.exists(path) assert_file_content(path, "Hello world") await page.close() async def test_should_report_new_window_downloads(browser, server): # TODO: - the test fails in headful Chromium as the popup page gets closed along # with the session before download completed event arrives. 
# - WebKit doesn't close the popup page page = await browser.new_page(accept_downloads=True) await page.set_content( f'<a target=_blank href="{server.PREFIX}/download">download</a>' ) async with page.expect_download() as download_info: await page.click("a") download = await download_info.value path = await download.path() assert os.path.exists(path) await page.close() async def test_should_delete_file(browser, server): page = await browser.new_page(accept_downloads=True) await page.set_content(f'<a href="{server.PREFIX}/download">download</a>') async with page.expect_download() as download_info: await page.click("a") download = await download_info.value path = await download.path() assert os.path.exists(path) await download.delete() assert os.path.exists(path) is False await page.close() async def test_should_delete_downloads_on_context_destruction(browser, server): page = await browser.new_page(accept_downloads=True) await page.set_content(f'<a href="{server.PREFIX}/download">download</a>') async with page.expect_download() as download_info: await page.click("a") download1 = await download_info.value async with page.expect_download() as download_info: await page.click("a") download2 = await download_info.value path1 = await download1.path() path2 = await download2.path() assert os.path.exists(path1) assert os.path.exists(path2) await page.context.close() assert os.path.exists(path1) is False assert os.path.exists(path2) is False async def test_should_delete_downloads_on_browser_gone(browser_factory, server): browser = await browser_factory() page = await browser.new_page(accept_downloads=True) await page.set_content(f'<a href="{server.PREFIX}/download">download</a>') async with page.expect_download() as download_info: await page.click("a") download1 = await download_info.value async with page.expect_download() as download_info: await page.click("a") download2 = await download_info.value path1 = await download1.path() path2 = await download2.path() assert 
os.path.exists(path1) assert os.path.exists(path2) await browser.close() assert os.path.exists(path1) is False assert os.path.exists(path2) is False assert os.path.exists(os.path.join(path1, "..")) is False
37.19774
96
0.717421
1,789
13,168
5.094466
0.134153
0.052337
0.029186
0.031271
0.757845
0.736998
0.712859
0.703862
0.694755
0.679285
0
0.00322
0.174666
13,168
353
97
37.303116
0.835388
0.070474
0
0.697509
0
0
0.144354
0.075286
0
0
0
0.002833
0.170819
1
0.021352
false
0.003559
0.024911
0
0.046263
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
070fae25d13c4879b48c2cbc833263f12709dc90
85
py
Python
clase/apps.py
luisza/lectura_ciclica
843d87a7cdbcd9a39b8d1ee76ec884d4a35e8634
[ "Apache-2.0" ]
null
null
null
clase/apps.py
luisza/lectura_ciclica
843d87a7cdbcd9a39b8d1ee76ec884d4a35e8634
[ "Apache-2.0" ]
null
null
null
clase/apps.py
luisza/lectura_ciclica
843d87a7cdbcd9a39b8d1ee76ec884d4a35e8634
[ "Apache-2.0" ]
null
null
null
from django.apps import AppConfig class ClaseConfig(AppConfig): name = 'clase'
14.166667
33
0.741176
10
85
6.3
0.9
0
0
0
0
0
0
0
0
0
0
0
0.176471
85
5
34
17
0.9
0
0
0
0
0
0.058824
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
074a8af93568ba0ab1efa18af79a75585a76a7a9
9,236
py
Python
src/bct_price/main.py
0xJem/discord-bots
46ce60e9e64ba0643f8dbb15a4970a68482a4813
[ "MIT" ]
1
2021-12-24T03:05:43.000Z
2021-12-24T03:05:43.000Z
main.py
frankTurtle/discord-bots
376527ad32537b303a4718a525369df2f513cf2a
[ "MIT" ]
null
null
null
main.py
frankTurtle/discord-bots
376527ad32537b303a4718a525369df2f513cf2a
[ "MIT" ]
null
null
null
import os import json from web3 import Web3 import discord from discord.ext import commands, tasks BOT_TOKEN = os.environ["DISCORD_BOT_TOKEN"] # Initialized Discord client intents = discord.Intents.all() intents.members = True client = commands.Bot(intents=intents, help_command=None, command_prefix='&?') # Initialize web3 project_id = os.environ['WEB3_INFURA_PROJECT_ID'] polygon_mainnet_endpoint = f'https://polygon-mainnet.infura.io/v3/{project_id}' web3 = Web3(Web3.HTTPProvider(polygon_mainnet_endpoint)) assert(web3.isConnected()) def lp_contract_info(sushi_address, basePrice=1): address = Web3.toChecksumAddress(sushi_address) abi = json.loads('[{"inputs":[],"stateMutability":"nonpayable","type":"constructor"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"owner","type":"address"},{"indexed":true,"internalType":"address","name":"spender","type":"address"},{"indexed":false,"internalType":"uint256","name":"value","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"sender","type":"address"},{"indexed":false,"internalType":"uint256","name":"amount0","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"amount1","type":"uint256"},{"indexed":true,"internalType":"address","name":"to","type":"address"}],"name":"Burn","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"sender","type":"address"},{"indexed":false,"internalType":"uint256","name":"amount0","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"amount1","type":"uint256"}],"name":"Mint","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"sender","type":"address"},{"indexed":false,"internalType":"uint256","name":"amount0In","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"amount1In","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"amount0Out","type":"uint256"}
,{"indexed":false,"internalType":"uint256","name":"amount1Out","type":"uint256"},{"indexed":true,"internalType":"address","name":"to","type":"address"}],"name":"Swap","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"uint112","name":"reserve0","type":"uint112"},{"indexed":false,"internalType":"uint112","name":"reserve1","type":"uint112"}],"name":"Sync","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"from","type":"address"},{"indexed":true,"internalType":"address","name":"to","type":"address"},{"indexed":false,"internalType":"uint256","name":"value","type":"uint256"}],"name":"Transfer","type":"event"},{"inputs":[],"name":"DOMAIN_SEPARATOR","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"MINIMUM_LIQUIDITY","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"PERMIT_TYPEHASH","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"","type":"address"},{"internalType":"address","name":"","type":"address"}],"name":"allowance","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"spender","type":"address"},{"internalType":"uint256","name":"value","type":"uint256"}],"name":"approve","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"balanceOf","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"to","type":"address"}],"name":"burn","outputs":[{"internalType":"uint256","name":"amount0","type":"uint256"},{"internalType
":"uint256","name":"amount1","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"decimals","outputs":[{"internalType":"uint8","name":"","type":"uint8"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"factory","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"getReserves","outputs":[{"internalType":"uint112","name":"_reserve0","type":"uint112"},{"internalType":"uint112","name":"_reserve1","type":"uint112"},{"internalType":"uint32","name":"_blockTimestampLast","type":"uint32"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"_token0","type":"address"},{"internalType":"address","name":"_token1","type":"address"}],"name":"initialize","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"kLast","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"to","type":"address"}],"name":"mint","outputs":[{"internalType":"uint256","name":"liquidity","type":"uint256"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"name","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"nonces","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"owner","type":"address"},{"internalType":"address","name":"spender","type":"address"},{"internalType":"uint256","name":"value","type":"uint256"},{"internalType":"uint256","name":"deadline","type":"uint256"},{"internalType":"uint8","name":"v","type":"uint8"},{"internalType":"bytes32","name":"r","type":"bytes32"},{"internalType":"bytes32","name":"s","type":"bytes32"}],"name":"permit","outputs":[],"state
Mutability":"nonpayable","type":"function"},{"inputs":[],"name":"price0CumulativeLast","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"price1CumulativeLast","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"to","type":"address"}],"name":"skim","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amount0Out","type":"uint256"},{"internalType":"uint256","name":"amount1Out","type":"uint256"},{"internalType":"address","name":"to","type":"address"},{"internalType":"bytes","name":"data","type":"bytes"}],"name":"swap","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"symbol","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"sync","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"token0","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"token1","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"totalSupply","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"value","type":"uint256"}],"name":"transfer","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"from","type":"address"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"value","type":"uint256"}],"name":"transferFrom","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateM
utability":"nonpayable","type":"function"}]') # noqa: E501 sushiLP = web3.eth.contract(address=address, abi=abi) try: Reserves = sushiLP.functions.getReserves().call() tokenPrice = Reserves[0]*basePrice*1e12/Reserves[1] return(tokenPrice) except Exception: pass @client.event async def on_ready(): print('Logged in as {0.user}'.format(client)) if not update_info.is_running(): update_info.start() @tasks.loop(seconds=300) async def update_info(): price = lp_contract_info(sushi_address='0x1e67124681b402064cd0abe8ed1b5c79d2e02f64') if price is not None: print(f'${price:,.2f} BCT') for guild in client.guilds: guser = guild.get_member(client.user.id) try: await guser.edit(nick=f'${price:,.2f} BCT') except discord.errors.HTTPException: return try: await client.change_presence( activity=discord.Activity( type=discord.ActivityType.watching, name='Toucan Protocol' ) ) except discord.errors.HTTPException: return client.run(BOT_TOKEN)
135.823529
7,427
0.658185
940
9,236
6.42766
0.182979
0.067362
0.106587
0.087223
0.715657
0.682224
0.588381
0.512579
0.464581
0.460775
0
0.033034
0.046232
9,236
67
7,428
137.850746
0.652855
0.005738
0
0.142857
0
0.020408
0.826996
0.811962
0
0
0.004576
0
0.020408
1
0.020408
false
0.020408
0.102041
0
0.163265
0.040816
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
4acaa0d32bddd5ffacb9813549b695024fac223a
514
py
Python
response/requestHandler.py
moustachio-belvedere/aeroponicsforall
7537a10f2b2c6147fdbf966910885952105029e1
[ "MIT" ]
null
null
null
response/requestHandler.py
moustachio-belvedere/aeroponicsforall
7537a10f2b2c6147fdbf966910885952105029e1
[ "MIT" ]
null
null
null
response/requestHandler.py
moustachio-belvedere/aeroponicsforall
7537a10f2b2c6147fdbf966910885952105029e1
[ "MIT" ]
null
null
null
class MockFile(): def read(self): return False class RequestHandler(): def __init__(self): self.contentType = "" self.contents = MockFile() def getContents(self): return self.contents.read() def read(self): return self.contents def setStatus(self, status): self.status = status def getStatus(self): return self.status def getContentType(self): return self.contentType def getType(self): return 'static'
19.769231
35
0.605058
54
514
5.685185
0.333333
0.19544
0.18241
0.110749
0
0
0
0
0
0
0
0
0.297665
514
26
36
19.769231
0.850416
0
0
0.105263
0
0
0.01165
0
0
0
0
0
0
1
0.421053
false
0
0
0.315789
0.842105
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
4acf8cb9ac614a30c78e145550f0ca4d8175ee66
19
py
Python
torrentool/__init__.py
pawanshah1997/mytool
0cd88becf5b6078cad4365c93f0a3a3013495f96
[ "BSD-3-Clause" ]
null
null
null
torrentool/__init__.py
pawanshah1997/mytool
0cd88becf5b6078cad4365c93f0a3a3013495f96
[ "BSD-3-Clause" ]
null
null
null
torrentool/__init__.py
pawanshah1997/mytool
0cd88becf5b6078cad4365c93f0a3a3013495f96
[ "BSD-3-Clause" ]
null
null
null
VERSION = (0, 4, 0)
19
19
0.526316
4
19
2.5
0.75
0
0
0
0
0
0
0
0
0
0
0.2
0.210526
19
1
19
19
0.466667
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
4aeb7d6aa14e4d0f63b5a7ab1753c7ade650334c
478
py
Python
keymaker/core/admin.py
fim/keymaker
ad0bc65cd33e5454ef6c1b52a3b5bc0cb81b1b94
[ "BSD-2-Clause" ]
1
2015-02-26T15:48:54.000Z
2015-02-26T15:48:54.000Z
keymaker/core/admin.py
fim/keymaker
ad0bc65cd33e5454ef6c1b52a3b5bc0cb81b1b94
[ "BSD-2-Clause" ]
null
null
null
keymaker/core/admin.py
fim/keymaker
ad0bc65cd33e5454ef6c1b52a3b5bc0cb81b1b94
[ "BSD-2-Clause" ]
null
null
null
from django.contrib import admin from keymaker.core.models import CSR, PrivateKey, Certificate, Subject class CSRAdmin(admin.ModelAdmin): pass class SubjectAdmin(admin.ModelAdmin): pass class PrivateKeyAdmin(admin.ModelAdmin): pass class CertificateAdmin(admin.ModelAdmin): pass admin.site.register(CSR, CSRAdmin) admin.site.register(PrivateKey, PrivateKeyAdmin) admin.site.register(Subject, SubjectAdmin) admin.site.register(Certificate, CertificateAdmin)
23.9
70
0.803347
54
478
7.111111
0.388889
0.15625
0.197917
0.1875
0
0
0
0
0
0
0
0
0.108787
478
19
71
25.157895
0.901408
0
0
0.285714
0
0
0
0
0
0
0
0
0
1
0
true
0.285714
0.142857
0
0.428571
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
4
4aec4747c233718503359150d69e5043cdf3cf66
498
py
Python
dfvfs/credentials/luksde_credentials.py
dfjxs/dfvfs
a4154b07bb08c3c86afa2847f3224189dd80c138
[ "Apache-2.0" ]
176
2015-01-02T13:55:39.000Z
2022-03-12T11:44:37.000Z
dfvfs/credentials/luksde_credentials.py
dfjxs/dfvfs
a4154b07bb08c3c86afa2847f3224189dd80c138
[ "Apache-2.0" ]
495
2015-01-13T06:47:06.000Z
2022-03-12T11:07:03.000Z
dfvfs/credentials/luksde_credentials.py
dfjxs/dfvfs
a4154b07bb08c3c86afa2847f3224189dd80c138
[ "Apache-2.0" ]
62
2015-02-23T08:19:38.000Z
2022-03-18T06:01:22.000Z
# -*- coding: utf-8 -*- """The LUKS Drive Encryption credentials.""" from dfvfs.credentials import credentials from dfvfs.credentials import manager from dfvfs.lib import definitions class LUKSDECredentials(credentials.Credentials): """LUKS Drive Encryption credentials.""" # TODO: add support for key_data credential. CREDENTIALS = frozenset([ 'password']) TYPE_INDICATOR = definitions.TYPE_INDICATOR_LUKSDE manager.CredentialsManager.RegisterCredentials(LUKSDECredentials())
24.9
67
0.7751
51
498
7.490196
0.588235
0.070681
0.099476
0.157068
0.193717
0
0
0
0
0
0
0.002304
0.128514
498
19
68
26.210526
0.87788
0.279116
0
0
0
0
0.023055
0
0
0
0
0.052632
0
1
0
false
0.125
0.375
0
0.75
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
1
1
0
1
0
0
4
ab15df2d80fd0f6c5ce767c00fade9386278c40a
249
py
Python
manage.py
Contraz/demosys-py
0479e0f3b0a3901f601bffd2d11e155f97b47555
[ "0BSD" ]
70
2017-03-31T12:01:41.000Z
2022-01-05T06:30:57.000Z
manage.py
Contraz/demosys-py
0479e0f3b0a3901f601bffd2d11e155f97b47555
[ "0BSD" ]
69
2017-06-18T22:37:46.000Z
2020-01-23T04:02:22.000Z
manage.py
Contraz/demosys-py
0479e0f3b0a3901f601bffd2d11e155f97b47555
[ "0BSD" ]
9
2017-05-13T21:13:02.000Z
2020-10-01T18:09:49.000Z
#!/usr/bin/env python3 import os import sys if __name__ == "__main__": os.environ.setdefault("DEMOSYS_SETTINGS_MODULE", "examples.settings") from demosys.management import execute_from_command_line execute_from_command_line(sys.argv)
22.636364
73
0.7751
33
249
5.363636
0.666667
0.124294
0.20339
0.248588
0
0
0
0
0
0
0
0.004608
0.128514
249
10
74
24.9
0.81106
0.084337
0
0
0
0
0.211454
0.101322
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
ab193c27bfef9fdf958904e7fb2540cf56c43b31
94
py
Python
osm_painter/__init__.py
nokutu/osm_painter
f8e7cc24e4f96b2df415380d99a47c915f2f128e
[ "MIT" ]
null
null
null
osm_painter/__init__.py
nokutu/osm_painter
f8e7cc24e4f96b2df415380d99a47c915f2f128e
[ "MIT" ]
null
null
null
osm_painter/__init__.py
nokutu/osm_painter
f8e7cc24e4f96b2df415380d99a47c915f2f128e
[ "MIT" ]
null
null
null
from .draw import draw from .model import BoxLocation, ElevationLayer, Layers, RadiusLocation
31.333333
70
0.829787
11
94
7.090909
0.727273
0
0
0
0
0
0
0
0
0
0
0
0.117021
94
2
71
47
0.939759
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
ab2d9afa39b9d2ef18c9e8884c53477f3c63fd63
718
py
Python
src/domain/route/repository.py
Monster-Gem/f-ticket
bfee2bcc3db92e2350c0f3bbc32108a79350583e
[ "MIT" ]
1
2022-03-23T13:35:38.000Z
2022-03-23T13:35:38.000Z
src/domain/route/repository.py
Monster-Gem/f-ticket
bfee2bcc3db92e2350c0f3bbc32108a79350583e
[ "MIT" ]
null
null
null
src/domain/route/repository.py
Monster-Gem/f-ticket
bfee2bcc3db92e2350c0f3bbc32108a79350583e
[ "MIT" ]
null
null
null
from . import entity from mongoengine.queryset.visitor import Q def get_all_routes(): return entity.Route.objects() def get_route_with_origin(origin): return entity.Route.objects(origin=origin) def get_route_with_destination(destination): return entity.Route.objects(destination=destination).first() def get_route(origin, destination): return entity.Route.objects(Q(origin=origin) & Q(destination=destination)).first() def add_route(route): route.save() return route def delete_route(route): route.delete() return def update_route(route, update_route): for key, value in update_route.items(): if value: route[key] = value route.save() return route
24.758621
86
0.725627
95
718
5.336842
0.294737
0.098619
0.134122
0.189349
0.138067
0
0
0
0
0
0
0
0.169916
718
29
87
24.758621
0.850671
0
0
0.181818
0
0
0
0
0
0
0
0
0
1
0.318182
false
0
0.090909
0.181818
0.727273
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
db6b9b33525d134e64a0e14f7ae6e8859ab8636e
119
py
Python
test_django_admin_bootstrapped/CapitalApp/models.py
Angoreher/django-admin-bootstrapped
6cb7782c0d23cd352042fc91de0c979f8d8fde52
[ "Apache-2.0" ]
987
2015-01-04T14:38:41.000Z
2022-03-23T21:38:42.000Z
test_django_admin_bootstrapped/CapitalApp/models.py
yakky/django-admin-bootstrapped
637baa44b5271465965941311c1fccef016137bf
[ "Apache-2.0" ]
104
2015-01-13T13:45:57.000Z
2021-08-15T05:01:56.000Z
test_django_admin_bootstrapped/CapitalApp/models.py
yakky/django-admin-bootstrapped
637baa44b5271465965941311c1fccef016137bf
[ "Apache-2.0" ]
290
2015-01-02T08:15:22.000Z
2022-03-10T07:51:01.000Z
from django.db import models # Create your models here. class CapitalModel(models.Model): name = models.TextField()
19.833333
33
0.764706
16
119
5.6875
0.8125
0
0
0
0
0
0
0
0
0
0
0
0.142857
119
5
34
23.8
0.892157
0.201681
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
db7db4b2b5bd00527769feb11fa1825106079504
192
py
Python
tcex/exit/__init__.py
GShepherdTC/tcex
70b1199b8bb9e63f53e2ba792489267108c909cd
[ "Apache-2.0" ]
null
null
null
tcex/exit/__init__.py
GShepherdTC/tcex
70b1199b8bb9e63f53e2ba792489267108c909cd
[ "Apache-2.0" ]
null
null
null
tcex/exit/__init__.py
GShepherdTC/tcex
70b1199b8bb9e63f53e2ba792489267108c909cd
[ "Apache-2.0" ]
null
null
null
"""Declares a service to manage exiting and cleaning up an app.""" # flake8: noqa # first-party from tcex.exit.error_codes import handle_error from tcex.exit.exit import ExitCode, ExitService
32
66
0.786458
30
192
4.966667
0.8
0.107383
0.161074
0
0
0
0
0
0
0
0
0.005988
0.130208
192
5
67
38.4
0.886228
0.447917
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
db91a80e99d278bf1a5f0301021d4c7be0c75f61
2,362
py
Python
ipfshttpclient/client/repo.py
iskanderandrews/py-ipfs-http-client
9b10477fee55ffc7580f770ab014b7e34a6b693b
[ "MIT" ]
186
2020-04-17T14:21:13.000Z
2022-03-28T20:16:14.000Z
ipfshttpclient/client/repo.py
iskanderandrews/py-ipfs-http-client
9b10477fee55ffc7580f770ab014b7e34a6b693b
[ "MIT" ]
104
2020-04-16T19:08:58.000Z
2022-03-02T01:13:51.000Z
ipfshttpclient/client/repo.py
iskanderandrews/py-ipfs-http-client
9b10477fee55ffc7580f770ab014b7e34a6b693b
[ "MIT" ]
94
2020-05-14T08:59:52.000Z
2022-03-31T12:13:27.000Z
from . import base class Section(base.SectionBase): @base.returns_multiple_items(base.ResponseBase) def gc(self, *, quiet: bool = False, **kwargs: base.CommonArgs): """Removes stored objects that are not pinned from the repo .. code-block:: python >>> client.repo.gc() [{'Key': 'QmNPXDC6wTXVmZ9Uoc8X1oqxRRJr4f1sDuyQuwaHG2mpW2'}, {'Key': 'QmNtXbF3AjAk59gQKRgEdVabHcSsiPUnJwHnZKyj2x8Z3k'}, {'Key': 'QmRVBnxUCsD57ic5FksKYadtyUbMsyo9KYQKKELajqAp4q'}, … {'Key': 'QmYp4TeCurXrhsxnzt5wqLqqUz8ZRg5zsc7GuUrUSDtwzP'}] Performs a garbage collection sweep of the local set of stored objects and remove ones that are not pinned in order to reclaim hard disk space. Returns the hashes of all collected objects. Parameters ---------- quiet Should the client will avoid downloading the list of removed objects? Passing ``True`` to this parameter often causing the GC process to speed up tremendously as it will also avoid generating the list of removed objects in the connected daemon at all. Returns ------- dict List of IPFS objects that have been removed """ kwargs.setdefault("opts", {})["quiet"] = quiet return self._client.request('/repo/gc', decoder='json', **kwargs) @base.returns_single_item(base.ResponseBase) def stat(self, **kwargs: base.CommonArgs): """Returns local repository status information .. code-block:: python >>> client.repo.stat() {'NumObjects': 354, 'RepoPath': '…/.local/share/ipfs', 'Version': 'fs-repo@4', 'RepoSize': 13789310} Returns ------- dict General information about the IPFS file repository +------------+-------------------------------------------------+ | NumObjects | Number of objects in the local repo. | +------------+-------------------------------------------------+ | RepoPath | The path to the repo being currently used. | +------------+-------------------------------------------------+ | RepoSize | Size in bytes that the repo is currently using. | +------------+-------------------------------------------------+ | Version | The repo version. 
| +------------+-------------------------------------------------+ """ return self._client.request('/repo/stat', decoder='json', **kwargs) #TODO: `version()`
32.356164
72
0.57536
235
2,362
5.782979
0.510638
0.020603
0.027962
0.023547
0.110375
0
0
0
0
0
0
0.018848
0.191363
2,362
72
73
32.805556
0.689529
0.817951
0
0
0
0
0.075269
0
0
0
0
0.013889
0
1
0.222222
false
0
0.111111
0
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
1
0
0
1
0
0
0
0
1
0
0
4
db924ffa9004430338179d39358ea9a3f932cd52
829
py
Python
test.py
coldmanck/dsa-py
0f406f5b967fa09a6257f3953d3072e5facbc204
[ "MIT" ]
null
null
null
test.py
coldmanck/dsa-py
0f406f5b967fa09a6257f3953d3072e5facbc204
[ "MIT" ]
null
null
null
test.py
coldmanck/dsa-py
0f406f5b967fa09a6257f3953d3072e5facbc204
[ "MIT" ]
null
null
null
map = \ [[0,0,1,0,0,0,0,1,0,0,0,0,0], [0,0,0,0,0,0,0,1,1,1,0,0,0], [0,1,1,0,1,0,0,0,0,0,0,0,0], [0,1,0,0,1,1,0,0,1,0,1,0,0], [0,1,0,0,1,1,0,0,1,1,1,0,0], [0,0,0,0,0,0,0,0,0,0,1,0,0], [0,0,0,0,0,0,0,1,1,1,0,0,0], [0,0,0,0,0,0,0,1,1,0,0,0,0]] def recursive_find(i, j, size): if i >= 0 and i < map.shape[0] and j >= 0 and j < map.shape[1] and map[i][j]==1: size += 1 map[i][j] = 2 size = recursive_find(i, j - 1, size) size = recursive_find(i, j + 1, size) size = recursive_find(i - 1, j, size) size = recursive_find(i + 1, j, size) return size import numpy as np map = np.array(map) sizes = [] for i in range(map.shape[0]): for j in range(map.shape[1]): size = 0 size = recursive_find(i, j, size) sizes.append(size) print(max(sizes))
24.382353
84
0.512666
205
829
2.043902
0.126829
0.300716
0.350835
0.381862
0.577566
0.477327
0.465394
0.451074
0.379475
0.379475
0
0.185827
0.234017
829
33
85
25.121212
0.474016
0
0
0.074074
0
0
0
0
0
0
0
0
0
1
0.037037
false
0
0.037037
0
0.111111
0.037037
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
db9532a5de9c9ac7bb3cdc94f05a9a6cb691255b
2,405
py
Python
FBSSchemaGenerator/build/python/FETObs.py
sintefneodroid/schema
05749c7b84caf188d555947fb6572bc48a5a9067
[ "Apache-2.0" ]
2
2018-04-28T13:45:45.000Z
2019-01-21T14:39:53.000Z
FBSSchemaGenerator/build/python/FETObs.py
sintefneodroid/schema
05749c7b84caf188d555947fb6572bc48a5a9067
[ "Apache-2.0" ]
null
null
null
FBSSchemaGenerator/build/python/FETObs.py
sintefneodroid/schema
05749c7b84caf188d555947fb6572bc48a5a9067
[ "Apache-2.0" ]
null
null
null
# automatically generated by the FlatBuffers compiler, do not modify # namespace: FBS import flatbuffers class FETObs(object): __slots__ = ['_tab'] @classmethod def GetRootAsFETObs(cls, buf, offset): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) x = FETObs() x.Init(buf, n + offset) return x # FETObs def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) # FETObs def Transform(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: x = o + self._tab.Pos from .FEulerTransform import FEulerTransform obj = FEulerTransform() obj.Init(self._tab.Bytes, x) return obj return None # FETObs def PosRange(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: x = o + self._tab.Pos from .FRange import FRange obj = FRange() obj.Init(self._tab.Bytes, x) return obj return None # FETObs def RotRange(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if o != 0: x = o + self._tab.Pos from .FRange import FRange obj = FRange() obj.Init(self._tab.Bytes, x) return obj return None # FETObs def DirRange(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) if o != 0: x = o + self._tab.Pos from .FRange import FRange obj = FRange() obj.Init(self._tab.Bytes, x) return obj return None def FETObsStart(builder): builder.StartObject(4) def FETObsAddTransform(builder, transform): builder.PrependStructSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(transform), 0) def FETObsAddPosRange(builder, posRange): builder.PrependStructSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(posRange), 0) def FETObsAddRotRange(builder, rotRange): builder.PrependStructSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(rotRange), 0) def FETObsAddDirRange(builder, dirRange): builder.PrependStructSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(dirRange), 0) def FETObsEnd(builder): return builder.EndObject()
33.873239
134
0.63659
283
2,405
5.289753
0.243816
0.060788
0.117568
0.187041
0.496994
0.496994
0.387442
0.387442
0.387442
0.374082
0
0.010164
0.263617
2,405
70
135
34.357143
0.835121
0.048233
0
0.490566
1
0
0.001754
0
0
0
0
0
0
1
0.226415
false
0
0.09434
0.018868
0.528302
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
dba0434b869019c9e8fc4dba6e2d485ed2d35d21
2,366
py
Python
idb/grpc/file.py
asafkorem/idb
43a3eccc1389e3c9f527992c64ef69b9b16b2472
[ "MIT" ]
null
null
null
idb/grpc/file.py
asafkorem/idb
43a3eccc1389e3c9f527992c64ef69b9b16b2472
[ "MIT" ]
null
null
null
idb/grpc/file.py
asafkorem/idb
43a3eccc1389e3c9f527992c64ef69b9b16b2472
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 # Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. from idb.common.types import FileContainer, FileContainerType from idb.grpc.idb_pb2 import FileContainer as GrpcFileContainer def container_to_grpc(container: FileContainer) -> GrpcFileContainer: if isinstance(container, str): return GrpcFileContainer( kind=GrpcFileContainer.APPLICATION, bundle_id=container ) if container == FileContainerType.MEDIA: return GrpcFileContainer(kind=GrpcFileContainer.MEDIA) if container == FileContainerType.CRASHES: return GrpcFileContainer(kind=GrpcFileContainer.CRASHES) if container == FileContainerType.ROOT: return GrpcFileContainer(kind=GrpcFileContainer.ROOT) if container == FileContainerType.PROVISIONING_PROFILES: return GrpcFileContainer(kind=GrpcFileContainer.PROVISIONING_PROFILES) if container == FileContainerType.MDM_PROFILES: return GrpcFileContainer(kind=GrpcFileContainer.MDM_PROFILES) if container == FileContainerType.SPRINGBOARD_ICONS: return GrpcFileContainer(kind=GrpcFileContainer.SPRINGBOARD_ICONS) if container == FileContainerType.WALLPAPER: return GrpcFileContainer(kind=GrpcFileContainer.WALLPAPER) if container == FileContainerType.DISK_IMAGES: return GrpcFileContainer(kind=GrpcFileContainer.DISK_IMAGES) if container == FileContainerType.GROUP: return GrpcFileContainer(kind=GrpcFileContainer.GROUP_CONTAINER) if container == FileContainerType.APPLICATION: return GrpcFileContainer(kind=GrpcFileContainer.APPLICATION_CONTAINER) if container == FileContainerType.AUXILLARY: return GrpcFileContainer(kind=GrpcFileContainer.AUXILLARY) if container == FileContainerType.XCTEST: return GrpcFileContainer(kind=GrpcFileContainer.XCTEST) if container == FileContainerType.DYLIB: return GrpcFileContainer(kind=GrpcFileContainer.DYLIB) if container == FileContainerType.DSYM: return GrpcFileContainer(kind=GrpcFileContainer.DSYM) if 
container == FileContainerType.FRAMEWORK: return GrpcFileContainer(kind=GrpcFileContainer.FRAMEWORK) return GrpcFileContainer(kind=GrpcFileContainer.NONE)
50.340426
78
0.775993
218
2,366
8.357798
0.302752
0.214599
0.251921
0.410538
0.175631
0
0
0
0
0
0
0.001002
0.156382
2,366
46
79
51.434783
0.911824
0.08115
0
0
0
0
0
0
0
0
0
0
0
1
0.026316
false
0
0.052632
0
0.526316
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
dba274401b44c55159106f26d48c38e10699dc73
1,159
py
Python
config/routers.py
hyphae/apis-service_center
0a6f0d349a435a3c739c2ee1ebdcc010c2dd13d1
[ "Apache-2.0" ]
null
null
null
config/routers.py
hyphae/apis-service_center
0a6f0d349a435a3c739c2ee1ebdcc010c2dd13d1
[ "Apache-2.0" ]
3
2021-03-20T10:35:08.000Z
2022-03-21T16:54:05.000Z
config/routers.py
hyphae/apis-service_center
0a6f0d349a435a3c739c2ee1ebdcc010c2dd13d1
[ "Apache-2.0" ]
null
null
null
"""Django でモデルとデータベース定義との対応付けをするクラスを定義 settings.DATABASE_ROUTERS でクラスを指定する. """ from django.conf import settings class DatabaseRouter: """モデルとデータベース定義との対応付けをするクラス config.DATABASE_APPS_MAPPING を参照し config.DATABASES のエントリ名を返す. """ def db_for_read(self, model, **hints): if model._meta.app_label in settings.DATABASE_APPS_MAPPING: return settings.DATABASE_APPS_MAPPING[model._meta.app_label] return settings.DATABASE_APPS_MAPPING['*'] def db_for_write(self, model, **hints): if model._meta.app_label in settings.DATABASE_APPS_MAPPING: return settings.DATABASE_APPS_MAPPING[model._meta.app_label] return settings.DATABASE_APPS_MAPPING['*'] def allow_relation(self, obj1, obj2, **hints): db1 = settings.DATABASE_APPS_MAPPING.get(obj1._meta.app_label) db2 = settings.DATABASE_APPS_MAPPING.get(obj2._meta.app_label) if db1 and db2: return db1 == db2 if not db1 and db2: return True return None def allow_migrate(self, db, app_label, model=None, **hints): if app_label in settings.DATABASE_APPS_MAPPING: return settings.DATABASE_APPS_MAPPING.get(app_label) == db return settings.DATABASE_APPS_MAPPING.get('*') == db
32.194444
64
0.780846
161
1,159
5.335404
0.273292
0.223516
0.265425
0.345751
0.541327
0.471478
0.426077
0.426077
0.426077
0.426077
0
0.011753
0.119068
1,159
35
65
33.114286
0.829579
0.138913
0
0.272727
0
0
0.003052
0
0
0
0
0
0
1
0.181818
false
0
0.045455
0
0.681818
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
dbbb068e3c1eaa17d7fdfe808640a9ea18b469e2
228
py
Python
scripts/update_centroid_reports.py
sot/mica
136a9b0d9521efda5208067b51cf0c8700b4def3
[ "BSD-3-Clause" ]
null
null
null
scripts/update_centroid_reports.py
sot/mica
136a9b0d9521efda5208067b51cf0c8700b4def3
[ "BSD-3-Clause" ]
150
2015-01-23T17:09:53.000Z
2022-01-10T00:50:54.000Z
scripts/update_centroid_reports.py
sot/mica
136a9b0d9521efda5208067b51cf0c8700b4def3
[ "BSD-3-Clause" ]
null
null
null
#!/usr/bin/env python # Licensed under a 3-clause BSD style license - see LICENSE.rst import mica.centroid_dashboard # Cheat. Needs entrypoint scripts mica.centroid_dashboard.update_observed_metrics(save=True, make_plots=True)
32.571429
75
0.815789
34
228
5.323529
0.852941
0.132597
0.232044
0
0
0
0
0
0
0
0
0.004878
0.100877
228
6
76
38
0.878049
0.5
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
91741e1d0f4d038601721d995d62edac19715782
124
py
Python
Aula01/aula01EstruturaDeRepeticaoWhile.py
leonardogt4/Curso-Introdu-o-a-Python---Univali
7f2a7c46b8ddf72391e58f22099d3d8ec91cbf7b
[ "MIT" ]
2
2019-03-08T21:21:03.000Z
2019-03-08T21:21:16.000Z
Aula01/aula01EstruturaDeRepeticaoWhile.py
leonardogt4/Curso-Introdu-o-a-Python---Univali
7f2a7c46b8ddf72391e58f22099d3d8ec91cbf7b
[ "MIT" ]
null
null
null
Aula01/aula01EstruturaDeRepeticaoWhile.py
leonardogt4/Curso-Introdu-o-a-Python---Univali
7f2a7c46b8ddf72391e58f22099d3d8ec91cbf7b
[ "MIT" ]
null
null
null
print("Test WHILE") total = 0 while total <= 100: print("%d Interação de Loop\n" %total) total += 1 print("End")
12.4
42
0.596774
19
124
3.894737
0.684211
0.27027
0
0
0
0
0
0
0
0
0
0.052632
0.233871
124
9
43
13.777778
0.726316
0
0
0
0
0
0.282258
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
4
91a89b6a062423aad50899fab0b7a9e7ae6a96ff
211
py
Python
terra_sdk/util/hash.py
yeeyangtee/terra-sdk-python
44e31290cfcb5563dd31a0d9c64c3ef2af72c0e2
[ "MIT" ]
66
2021-10-21T23:29:38.000Z
2022-03-30T15:58:13.000Z
terra_sdk/util/hash.py
yeeyangtee/terra-sdk-python
44e31290cfcb5563dd31a0d9c64c3ef2af72c0e2
[ "MIT" ]
50
2021-10-19T06:11:56.000Z
2022-03-31T17:06:57.000Z
terra_sdk/util/hash.py
yeeyangtee/terra-sdk-python
44e31290cfcb5563dd31a0d9c64c3ef2af72c0e2
[ "MIT" ]
39
2021-11-07T17:28:31.000Z
2022-03-31T15:03:57.000Z
import base64 import hashlib def hash_amino(txdata: str) -> str: """Get the transaction hash from Amino-encoded Transaction in base64.""" return hashlib.sha256(base64.b64decode(txdata)).digest().hex()
26.375
76
0.734597
28
211
5.5
0.678571
0
0
0
0
0
0
0
0
0
0
0.060773
0.14218
211
7
77
30.142857
0.790055
0.312796
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0.5
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
0
0
0
4
91b87097ca71d86eb77a4fdbf667a3e9e9177372
154
py
Python
backend/users/tasks.py
jaenia/test-django-react-boilerplate
d172cc56c7ad93870d05ce3c41742fcc28d6bbb3
[ "MIT" ]
null
null
null
backend/users/tasks.py
jaenia/test-django-react-boilerplate
d172cc56c7ad93870d05ce3c41742fcc28d6bbb3
[ "MIT" ]
null
null
null
backend/users/tasks.py
jaenia/test-django-react-boilerplate
d172cc56c7ad93870d05ce3c41742fcc28d6bbb3
[ "MIT" ]
null
null
null
from django.core import management from todolist2 import celery_app @celery_app.task def clearsessions(): management.call_command('clearsessions')
17.111111
44
0.805195
19
154
6.368421
0.684211
0.14876
0
0
0
0
0
0
0
0
0
0.007407
0.123377
154
8
45
19.25
0.888889
0
0
0
0
0
0.084416
0
0
0
0
0
0
1
0.2
true
0
0.4
0
0.6
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
91d920cc119dc78a1e47d124e0deba5e4dda51d3
222
py
Python
deepomatic/cli/cmds/site/list.py
Deepomatic/deepocli
127b95200c3c8e4cbe98bb1548fe865893ab2a49
[ "MIT" ]
15
2018-12-19T16:51:05.000Z
2019-02-22T09:08:05.000Z
deepomatic/cli/cmds/site/list.py
Deepomatic/deepocli
127b95200c3c8e4cbe98bb1548fe865893ab2a49
[ "MIT" ]
150
2018-12-20T13:45:05.000Z
2022-03-12T01:04:32.000Z
deepomatic/cli/cmds/site/list.py
Deepomatic/deepocli
127b95200c3c8e4cbe98bb1548fe865893ab2a49
[ "MIT" ]
null
null
null
from ..utils import Command from .utils import SiteManager class ListCommand(Command): """ Get the list of installed sites """ def run(self, **kwargs): print('\n'.join(SiteManager().list()))
18.5
46
0.621622
26
222
5.307692
0.769231
0.130435
0.217391
0
0
0
0
0
0
0
0
0
0.238739
222
11
47
20.181818
0.816568
0.13964
0
0
0
0
0.011696
0
0
0
0
0
0
1
0.2
false
0
0.4
0
0.8
0.2
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
37d12d961dc439435eb2e7cddbd7b53f5561d58f
1,555
py
Python
metadeploy/api/migrations/0016_use_hashids.py
sfdc-qbranch/MetaDeploy
d22547b3814dbec6aefa4d86b9f81c6f175c1b67
[ "BSD-3-Clause" ]
33
2019-03-20T15:34:39.000Z
2022-03-30T15:59:40.000Z
metadeploy/api/migrations/0016_use_hashids.py
sfdc-qbranch/MetaDeploy
d22547b3814dbec6aefa4d86b9f81c6f175c1b67
[ "BSD-3-Clause" ]
2,718
2019-02-27T19:46:07.000Z
2022-03-11T23:18:09.000Z
metadeploy/api/migrations/0016_use_hashids.py
sfdc-qbranch/MetaDeploy
d22547b3814dbec6aefa4d86b9f81c6f175c1b67
[ "BSD-3-Clause" ]
28
2019-03-28T04:57:16.000Z
2022-02-04T16:49:25.000Z
# Generated by Django 2.1.2 on 2018-11-07 17:52 import hashid_field.field from django.db import migrations alphabet = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890" class Migration(migrations.Migration): dependencies = [("api", "0015_merge_20181031_0222")] operations = [ migrations.AlterField( model_name="job", name="id", field=hashid_field.field.HashidAutoField( alphabet=alphabet, min_length=7, primary_key=True, serialize=False ), ), migrations.AlterField( model_name="plan", name="id", field=hashid_field.field.HashidAutoField( alphabet=alphabet, min_length=7, primary_key=True, serialize=False ), ), migrations.AlterField( model_name="product", name="id", field=hashid_field.field.HashidAutoField( alphabet=alphabet, min_length=7, primary_key=True, serialize=False ), ), migrations.AlterField( model_name="step", name="id", field=hashid_field.field.HashidAutoField( alphabet=alphabet, min_length=7, primary_key=True, serialize=False ), ), migrations.AlterField( model_name="version", name="id", field=hashid_field.field.HashidAutoField( alphabet=alphabet, min_length=7, primary_key=True, serialize=False ), ), ]
31.1
82
0.58328
144
1,555
6.131944
0.319444
0.074745
0.10872
0.164213
0.674972
0.674972
0.674972
0.674972
0.674972
0.674972
0
0.043396
0.318328
1,555
49
83
31.734694
0.789623
0.028939
0
0.714286
1
0
0.082228
0.057029
0
0
0
0
0
1
0
false
0
0.047619
0
0.119048
0
0
0
0
null
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
37d8274952af104cee7cdbb8c0f605319df4cd54
195
py
Python
pythontraining/mymodule_user.py
srikanteswartalluri/pyutils
bf8d56ac9e9b0786861c08ef32eae49b021f20a3
[ "0BSD" ]
null
null
null
pythontraining/mymodule_user.py
srikanteswartalluri/pyutils
bf8d56ac9e9b0786861c08ef32eae49b021f20a3
[ "0BSD" ]
null
null
null
pythontraining/mymodule_user.py
srikanteswartalluri/pyutils
bf8d56ac9e9b0786861c08ef32eae49b021f20a3
[ "0BSD" ]
null
null
null
__author__ = 'talluri' import mymodule print mymodule.VERSION print mymodule.version() from mymodule import *#not a good practice, it pollutes local name space print VERSION print version()
15
72
0.784615
26
195
5.730769
0.615385
0.174497
0.268456
0
0
0
0
0
0
0
0
0
0.153846
195
12
73
16.25
0.90303
0.251282
0
0
0
0
0.048276
0
0
0
0
0
0
0
null
null
0
0.285714
null
null
0.571429
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
4
37da8be5809f4b80c257533903d5d11aa721345a
1,872
py
Python
modules/signatures/windows/disables_browserwarn.py
Yuanmessi/Bold-Falcon
00fcaba0b3d9c462b9d20ecb256ff85db5d119e2
[ "BSD-3-Clause" ]
24
2021-06-21T07:35:37.000Z
2022-03-22T03:33:59.000Z
modules/signatures/windows/disables_browserwarn.py
Yuanmessi/Bold-Falcon
00fcaba0b3d9c462b9d20ecb256ff85db5d119e2
[ "BSD-3-Clause" ]
3
2021-07-01T08:09:05.000Z
2022-01-28T03:38:36.000Z
modules/signatures/windows/disables_browserwarn.py
Yuanmessi/Bold-Falcon
00fcaba0b3d9c462b9d20ecb256ff85db5d119e2
[ "BSD-3-Clause" ]
6
2021-06-22T05:32:57.000Z
2022-02-11T02:05:45.000Z
# Copyright (C) 2015 Optiv, Inc. (brad.spengler@optiv.com), Kevin Ross, Updated 2016 for Cuckoo 2.0 # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org # See the file 'docs/LICENSE' for copying permission. from lib.cuckoo.common.abstracts import Signature class DisablesBrowserWarn(Signature): name = "disables_browser_warn" description = "Attempts to disable browser security warnings" severity = 3 categories = ["generic", "banker", "clickfraud"] authors = ["Optiv", "Kevin Ross"] minimum = "2.0" ttp = ["T1089"] regkeys_re = [ ".*\\\\SOFTWARE\\\\(Wow6432Node\\\\)?Microsoft\\\\Windows\\\\CurrentVersion\\\\Internet\\ Settings\\\\WarnOnBadCertRecving", ".*\\\\SOFTWARE\\\\(Wow6432Node\\\\)?Microsoft\\\\Windows\\\\CurrentVersion\\\\Internet\\ Settings\\\\WarnOnBadCertSending", ".*\\\\SOFTWARE\\\\(Wow6432Node\\\\)?Microsoft\\\\Windows\\\\CurrentVersion\\\\Internet\\ Settings\\\\WarnOnHTTPSToHTTPRedirect", ".*\\\\SOFTWARE\\\\(Wow6432Node\\\\)?Microsoft\\\\Windows\\\\CurrentVersion\\\\Internet\\ Settings\\\\WarnOnZoneCrossing", ".*\\\\SOFTWARE\\\\(Wow6432Node\\\\)?Microsoft\\\\Windows\\\\CurrentVersion\\\\Internet\\ Settings\\\\WarnOnPostRedirect", ".*\\\\SOFTWARE\\\\(Wow6432Node\\\\)?Microsoft\\\\Windows\\\\CurrentVersion\\\\Internet\\ Settings\\\\IEHardenIENoWarn", ".*\\\\SOFTWARE\\\\(Wow6432Node\\\\)?Microsoft\\\\Internet\\ Explorer\\\\Main\\\\NoProtectedModeBanner", ".*\\\\SOFTWARE\\\\(Wow6432Node\\\\)?Microsoft\\\\Internet\\ Explorer\\\\Main\\\\IE9RunOncePerInstall", ] def on_complete(self): for indicator in self.regkeys_re: for regkey in self.check_key(pattern=indicator, regex=True, actions=["regkey_written"], all=True): self.mark_ioc("registry", regkey) return self.has_marks()
56.727273
137
0.651709
170
1,872
7.123529
0.588235
0.125516
0.184971
0.17341
0.401321
0.401321
0.322048
0
0
0
0
0.030979
0.137821
1,872
32
138
58.5
0.719331
0.115385
0
0
0
0
0.640436
0.567191
0
0
0
0
0
1
0.041667
false
0
0.041667
0
0.5
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
5310795e16ddb15e63de386e9c904f7f7aedbfa9
71
py
Python
beginning-python-3ed-master/Chapter10/listing10-3.py
zhuxinkai/python3-book-practice
a0d297df0755eaf239398f138c3b38746c32ac99
[ "MIT" ]
null
null
null
beginning-python-3ed-master/Chapter10/listing10-3.py
zhuxinkai/python3-book-practice
a0d297df0755eaf239398f138c3b38746c32ac99
[ "MIT" ]
1
2020-06-19T05:47:38.000Z
2020-06-19T05:47:46.000Z
beginning-python-3ed-master/Chapter10/listing10-3.py
zhuxinkai/python3-book-practice
a0d297df0755eaf239398f138c3b38746c32ac99
[ "MIT" ]
null
null
null
# hello3.py def hello(): print("Hello, world!") # A test: hello()
10.142857
26
0.577465
10
71
4.1
0.8
0
0
0
0
0
0
0
0
0
0
0.017857
0.211268
71
7
27
10.142857
0.714286
0.239437
0
0
0
0
0.25
0
0
0
0
0
0
1
0.333333
true
0
0
0
0.333333
0.333333
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
0
0
0
4
53225ca1acb5de9668657bfef0250c2bb0f98b48
13,155
py
Python
Patent_US_3423942_Rocket_Engine/Version_1/unit_tests.py
Jay4C/Python-Macros-For_FreeCAD
12ce5441a26731377fa43e86ccd2be675740d3a0
[ "MIT" ]
null
null
null
Patent_US_3423942_Rocket_Engine/Version_1/unit_tests.py
Jay4C/Python-Macros-For_FreeCAD
12ce5441a26731377fa43e86ccd2be675740d3a0
[ "MIT" ]
null
null
null
Patent_US_3423942_Rocket_Engine/Version_1/unit_tests.py
Jay4C/Python-Macros-For_FreeCAD
12ce5441a26731377fa43e86ccd2be675740d3a0
[ "MIT" ]
null
null
null
import os import time import unittest import pywinauto.mouse import pywinauto.keyboard # https://patents.google.com/patent/US3423942A/en?q=rocket+engine&before=priority:19900101 class UnitTestsPatentUS3423942RocketEngineVersion(unittest.TestCase): # ok def test_part_nozzle(self): print("test_part_nozzle") if os.path.exists("part_nozzle.py"): os.remove("part_nozzle.py") else: print("The file does not exist") # Writing to file with open("part_nozzle.py", "w") as file: # Writing data to a file file.write("""import FreeCAD, Part, Drawing, math, Mesh DOC = FreeCAD.activeDocument() DOC_NAME = "part_nozzle" def clear_doc(): # Clear the active document deleting all the objects for obj in DOC.Objects: DOC.removeObject(obj.Name) def setview(): # Rearrange View FreeCAD.Gui.SendMsgToActiveView("ViewFit") FreeCAD.Gui.activeDocument().activeView().viewAxometric() if DOC is None: FreeCAD.newDocument(DOC_NAME) FreeCAD.setActiveDocument(DOC_NAME) DOC = FreeCAD.activeDocument() else: clear_doc() # EPS= tolerance to use to cut the parts EPS = 0.10 EPS_C = EPS * (-0.5) diametre_maximal_of_nozzle_front = 85 # part_nozzle part_nozzle = Part.makeCylinder(diametre_maximal_of_nozzle_front/2, 3) # part_nozzle cut by cylinder_1 cylinder_1 = Part.makeCylinder(diametre_maximal_of_nozzle_front/2 - 3.5 - 5 - 4.5 - 2, 3) part_nozzle = part_nozzle.cut(cylinder_1) # holes for fixing the nozzle degre = 60 for i in range(int(360/degre)): radius = diametre_maximal_of_nozzle_front/2 - 2.5 - 3.5 alpha=(i*degre*math.pi)/180 hole_vector = FreeCAD.Vector(radius*math.cos(alpha), radius*math.sin(alpha), 0) hole = Part.makeCylinder(2.5, 3) hole.translate(hole_vector) part_nozzle = part_nozzle.cut(hole) # cone_1 cone_1_radius_1 = diametre_maximal_of_nozzle_front/2 - 3.5 - 5 - 4.5 cone_1_radius_2 = 20/2 cone_1_height = 50 cone_1 = Part.makeCone(cone_1_radius_1, cone_1_radius_2, cone_1_height) # cone_2 cone_2_radius_1 = cone_1_radius_1 - 2 cone_2_radius_2 = cone_1_radius_2 - 2 cone_2_height = 50 cone_2 = 
Part.makeCone(cone_2_radius_1, cone_2_radius_2, cone_2_height) # cone_1 cut by cone_2 cone_1 = cone_1.cut(cone_2) # part_nozzle fused with cone_1 cone_1_vector = FreeCAD.Vector(0, 0, 3) cone_1.translate(cone_1_vector) part_nozzle = part_nozzle.fuse(cone_1) # cone_3 cone_3_radius_1 = 20/2 cone_3_radius_2 = diametre_maximal_of_nozzle_front/2 cone_3_height = 100 cone_3 = Part.makeCone(cone_3_radius_1, cone_3_radius_2, cone_3_height) # cone_4 cone_4_radius_1 = cone_3_radius_1 - 2 cone_4_radius_2 = cone_3_radius_2 - 2 cone_4_height = 100 cone_4 = Part.makeCone(cone_4_radius_1, cone_4_radius_2, cone_4_height) # cone_3 cut by cone_4 cone_3 = cone_3.cut(cone_4) # part_nozzle fused with cone_3 cone_3_vector = FreeCAD.Vector(0, 0, 3 + cone_1_height) cone_3.translate(cone_3_vector) part_nozzle = part_nozzle.fuse(cone_3) Part.show(part_nozzle) DOC.recompute() __objs__ = [] __objs__.append(FreeCAD.getDocument("part_nozzle").getObject("Shape")) stl_file = u"part_nozzle.stl" Mesh.export(__objs__, stl_file) setview() """) time.sleep(3) pywinauto.mouse.click(button="left", coords=(460, 750)) time.sleep(3) pywinauto.mouse.click(button="left", coords=(70, 670)) time.sleep(3) pywinauto.keyboard.send_keys( 'exec{(}open{(}"part_nozzle.py"{)}.read{(}{)}{)}' ) time.sleep(3) pywinauto.keyboard.send_keys('{ENTER}') # ok def test_part_gas_entry(self): print("test_part_gas_entry") if os.path.exists("part_gas_entry.py"): os.remove("part_gas_entry.py") else: print("The file does not exist") # Writing to file with open("part_gas_entry.py", "w") as file: # Writing data to a file file.write("""import FreeCAD, Part, Drawing, math, Mesh DOC = FreeCAD.activeDocument() DOC_NAME = "part_gas_entry" def clear_doc(): # Clear the active document deleting all the objects for obj in DOC.Objects: DOC.removeObject(obj.Name) def setview(): # Rearrange View FreeCAD.Gui.SendMsgToActiveView("ViewFit") FreeCAD.Gui.activeDocument().activeView().viewAxometric() if DOC is None: FreeCAD.newDocument(DOC_NAME) 
FreeCAD.setActiveDocument(DOC_NAME) DOC = FreeCAD.activeDocument() else: clear_doc() # EPS= tolerance to use to cut the parts EPS = 0.10 EPS_C = EPS * (-0.5) diametre_maximal_of_nozzle_front = 85 # part_gas_entry part_gas_entry = Part.makeCylinder(diametre_maximal_of_nozzle_front/2, 3) # part_gas_entry cut by cylinder_1 cylinder_1 = Part.makeCylinder(11/2, 3) part_gas_entry = part_gas_entry.cut(cylinder_1) # holes for fixing the nozzle degre = 60 for i in range(int(360/degre)): radius = diametre_maximal_of_nozzle_front/2 - 2.5 - 3.5 alpha=(i*degre*math.pi)/180 hole_vector = FreeCAD.Vector(radius*math.cos(alpha), radius*math.sin(alpha), 0) hole = Part.makeCylinder(2.5, 3) hole.translate(hole_vector) part_gas_entry = part_gas_entry.cut(hole) Part.show(part_gas_entry) DOC.recompute() __objs__ = [] __objs__.append(FreeCAD.getDocument("part_gas_entry").getObject("Shape")) stl_file = u"part_gas_entry.stl" Mesh.export(__objs__, stl_file) setview() """) time.sleep(3) pywinauto.mouse.click(button="left", coords=(460, 750)) time.sleep(3) pywinauto.mouse.click(button="left", coords=(70, 670)) time.sleep(3) pywinauto.keyboard.send_keys( 'exec{(}open{(}"part_gas_entry.py"{)}.read{(}{)}{)}' ) time.sleep(3) pywinauto.keyboard.send_keys('{ENTER}') # ok # https://www.leroymerlin.fr/produits/chauffage-plomberie/circuit-alimentation-en-eau/tube-et-raccord-alimentation/raccord-alimentation/lot-de-2-manchons-a-visser-laiton-f-12-x-17-pour-tube-en-cuivre-65815253.html def test_part_element_64(self): print("test_part_element_64") if os.path.exists("part_element_64.py"): os.remove("part_element_64.py") else: print("The file does not exist") # Writing to file with open("part_element_64.py", "w") as file: # Writing data to a file file.write("""import FreeCAD, Part, Mesh DOC = FreeCAD.activeDocument() DOC_NAME = "part_element_64" def clear_doc(): # Clear the active document deleting all the objects for obj in DOC.Objects: DOC.removeObject(obj.Name) def setview(): # Rearrange View 
FreeCAD.Gui.SendMsgToActiveView("ViewFit") FreeCAD.Gui.activeDocument().activeView().viewAxometric() if DOC is None: FreeCAD.newDocument(DOC_NAME) FreeCAD.setActiveDocument(DOC_NAME) DOC = FreeCAD.activeDocument() else: clear_doc() # EPS= tolerance to use to cut the parts EPS = 0.10 EPS_C = EPS * -0.5 diametre_maximal = 22 cylinder_1 = Part.makeCylinder(diametre_maximal/2, 18) cylinder_2 = Part.makeCylinder(16.4/2, 18) cylinder_1 = cylinder_1.cut(cylinder_2) Part.show(cylinder_1) DOC.recompute() __objs__=[] __objs__.append(FreeCAD.getDocument("part_element_64").getObject("Shape")) stl_file = u"part_element_64.stl" Mesh.export(__objs__, stl_file) setview() """) time.sleep(3) pywinauto.mouse.click(button="left", coords=(460, 750)) time.sleep(3) pywinauto.mouse.click(button="left", coords=(70, 670)) time.sleep(3) pywinauto.keyboard.send_keys( 'exec{(}open{(}"part_element_64.py"{)}.read{(}{)}{)}' ) time.sleep(3) pywinauto.keyboard.send_keys('{ENTER}') # ok # https://www.leroymerlin.fr/produits/chauffage-plomberie/circuit-alimentation-en-eau/tube-et-raccord-alimentation/raccord-alimentation/lot-de-2-mamelons-a-visser-laiton-m-12-x-17-pour-tube-en-cuivre-65814231.html def test_part_element_32(self): print("test_part_element_32") if os.path.exists("part_element_32.py"): os.remove("part_element_32.py") else: print("The file does not exist") # Writing to file with open("part_element_32.py", "w") as file: # Writing data to a file file.write("""import FreeCAD, Part, Mesh DOC = FreeCAD.activeDocument() DOC_NAME = "part_element_32" def clear_doc(): # Clear the active document deleting all the objects for obj in DOC.Objects: DOC.removeObject(obj.Name) def setview(): # Rearrange View FreeCAD.Gui.SendMsgToActiveView("ViewFit") FreeCAD.Gui.activeDocument().activeView().viewAxometric() if DOC is None: FreeCAD.newDocument(DOC_NAME) FreeCAD.setActiveDocument(DOC_NAME) DOC = FreeCAD.activeDocument() else: clear_doc() # EPS= tolerance to use to cut the parts EPS = 0.10 EPS_C = EPS * 
-0.5 diametre_maximal = 19 cylinder_1 = Part.makeCylinder(diametre_maximal/2, 19) cylinder_2 = Part.makeCylinder(11/2, 19) cylinder_1 = cylinder_1.cut(cylinder_2) cylinder_3 = Part.makeCylinder(diametre_maximal/2, 8) cylinder_4 = Part.makeCylinder(16/2, 8) cylinder_3 = cylinder_3.cut(cylinder_4) cylinder_1 = cylinder_1.cut(cylinder_3) cylinder_3_vector = FreeCAD.Vector(0, 0, 11) cylinder_3.translate(cylinder_3_vector) cylinder_1 = cylinder_1.cut(cylinder_3) Part.show(cylinder_1) DOC.recompute() __objs__=[] __objs__.append(FreeCAD.getDocument("part_element_32").getObject("Shape")) stl_file = u"part_element_32.stl" Mesh.export(__objs__, stl_file) setview() """) time.sleep(3) pywinauto.mouse.click(button="left", coords=(460, 750)) time.sleep(3) pywinauto.mouse.click(button="left", coords=(70, 670)) time.sleep(3) pywinauto.keyboard.send_keys( 'exec{(}open{(}"part_element_32.py"{)}.read{(}{)}{)}' ) time.sleep(3) pywinauto.keyboard.send_keys('{ENTER}') # ok def test_assembly(self): print("test_assembly") if os.path.exists("assembly.py"): os.remove("assembly.py") else: print("The file does not exist") # Writing to file with open("assembly.py", "w") as file: # Writing data to a file file.write("""import FreeCAD, Part, Mesh, math DOC = FreeCAD.activeDocument() DOC_NAME = "assembly" def clear_doc(): # Clear the active document deleting all the objects for obj in DOC.Objects: DOC.removeObject(obj.Name) def setview(): # Rearrange View FreeCAD.Gui.SendMsgToActiveView("ViewFit") FreeCAD.Gui.activeDocument().activeView().viewAxometric() if DOC is None: FreeCAD.newDocument(DOC_NAME) FreeCAD.setActiveDocument(DOC_NAME) DOC = FreeCAD.activeDocument() else: clear_doc() # EPS= tolerance to use to cut the parts EPS = 0.10 EPS_C = EPS * (-0.5) # insertion part_gas_entry Mesh.insert(u"part_gas_entry.stl", "assembly") FreeCAD.getDocument("assembly").getObject("part_gas_entry").Placement = App.Placement(App.Vector(0, 0, 0), App.Rotation(App.Vector(0,0,1), 0)) 
FreeCADGui.getDocument("assembly").getObject("part_gas_entry").ShapeColor = (0.50,0.40,0.30) # insertion part_nozzle Mesh.insert(u"part_nozzle.stl", "assembly") FreeCAD.getDocument("assembly").getObject("part_nozzle").Placement = App.Placement(App.Vector(0, 0, 3), App.Rotation(App.Vector(0,0,1), 0)) FreeCADGui.getDocument("assembly").getObject("part_nozzle").ShapeColor = (0.10,0.20,0.30) FreeCADGui.getDocument("assembly").getObject("part_nozzle").Transparency = 70 # insertion part_element_64 Mesh.insert(u"part_element_64.stl", "assembly") FreeCAD.getDocument("assembly").getObject("part_element_64").Placement = App.Placement(App.Vector(0, 0, 3), App.Rotation(App.Vector(0,0,1), 0)) FreeCADGui.getDocument("assembly").getObject("part_element_64").ShapeColor = (0.90,0.80,0.70) # insertion part_element_32 Mesh.insert(u"part_element_32.stl", "assembly") FreeCAD.getDocument("assembly").getObject("part_element_32").Placement = App.Placement(App.Vector(0, 0, -11), App.Rotation(App.Vector(0,0,1), 0)) FreeCADGui.getDocument("assembly").getObject("part_element_32").ShapeColor = (0.70,0.80,0.90) setview() __objs__ = [] __objs__.append(FreeCAD.getDocument("assembly").getObject("part_gas_entry")) __objs__.append(FreeCAD.getDocument("assembly").getObject("part_nozzle")) __objs__.append(FreeCAD.getDocument("assembly").getObject("part_element_64")) __objs__.append(FreeCAD.getDocument("assembly").getObject("part_element_32")) stl_file = u"assembly.stl" Mesh.export(__objs__, stl_file) del __objs__ """) time.sleep(3) pywinauto.mouse.click(button="left", coords=(460, 750)) time.sleep(3) pywinauto.mouse.click(button="left", coords=(70, 670)) time.sleep(3) pywinauto.keyboard.send_keys( 'exec{(}open{(}"assembly.py"{)}.read{(}{)}{)}' ) time.sleep(3) pywinauto.keyboard.send_keys('{ENTER}') if __name__ == '__main__': unittest.main()
25.444874
217
0.687799
1,881
13,155
4.564593
0.107921
0.033776
0.030748
0.044258
0.832635
0.758793
0.726648
0.643839
0.592826
0.590962
0
0.046103
0.178867
13,155
516
218
25.494186
0.74875
0.054884
0
0.583587
0
0.033435
0.757693
0.376027
0
0
0
0
0
1
0.015198
false
0
0.030395
0
0.048632
0.030395
0
0
0
null
0
0
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
533149b10cf773e731b2338a2c6ec70f5e802c6c
69
py
Python
neuralmonkey/__init__.py
Simon-Will/neuralmonkey
b686a9d302cb10eda5fca991e1d7ee6b9e84b75a
[ "BSD-3-Clause" ]
446
2016-06-16T11:31:12.000Z
2021-12-04T04:19:26.000Z
neuralmonkey/__init__.py
Simon-Will/neuralmonkey
b686a9d302cb10eda5fca991e1d7ee6b9e84b75a
[ "BSD-3-Clause" ]
673
2016-06-20T09:10:50.000Z
2020-10-13T17:37:34.000Z
neuralmonkey/__init__.py
Simon-Will/neuralmonkey
b686a9d302cb10eda5fca991e1d7ee6b9e84b75a
[ "BSD-3-Clause" ]
116
2016-06-16T18:23:13.000Z
2021-02-06T06:45:20.000Z
"""The neuralmonkey package is the root package of this project."""
34.5
68
0.73913
10
69
5.1
0.8
0
0
0
0
0
0
0
0
0
0
0
0.15942
69
1
69
69
0.87931
0.884058
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
53692160af71254a08a87d70e3e5a37ad2737a9b
171
py
Python
cli.py
shimataro/brocadefw
c49e6a448a83d47d3b7046194748a97c19a3e1d7
[ "MIT" ]
1
2015-11-22T06:27:56.000Z
2015-11-22T06:27:56.000Z
cli.py
shimataro/brocadefw
c49e6a448a83d47d3b7046194748a97c19a3e1d7
[ "MIT" ]
null
null
null
cli.py
shimataro/brocadefw
c49e6a448a83d47d3b7046194748a97c19a3e1d7
[ "MIT" ]
null
null
null
#!/usr/bin/python3.2 # -*- coding: utf-8 -*- """ コマンドラインインターフェース @author: shimataro """ def main(): return 0 if __name__ == "__main__": import sys sys.exit(main())
11.4
26
0.619883
22
171
4.454545
0.863636
0
0
0
0
0
0
0
0
0
0
0.028169
0.169591
171
14
27
12.214286
0.661972
0.450292
0
0
0
0
0.094118
0
0
0
0
0
0
1
0.2
true
0
0.2
0.2
0.6
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
1
1
0
0
4
7267aade2878283d4fb20c9eec7cb04ab913d4a8
426
py
Python
src/todo_or_die/exceptions.py
walshyb/todo-or-die-python
42abc2ae4f5f9e26dba77fe83420766fa2b4ce0d
[ "MIT" ]
2
2021-09-13T15:58:09.000Z
2021-09-22T03:45:33.000Z
src/todo_or_die/exceptions.py
walshyb/todo-or-die-python
42abc2ae4f5f9e26dba77fe83420766fa2b4ce0d
[ "MIT" ]
null
null
null
src/todo_or_die/exceptions.py
walshyb/todo-or-die-python
42abc2ae4f5f9e26dba77fe83420766fa2b4ce0d
[ "MIT" ]
null
null
null
from .helpers.generate_message import generate_message from warnings import warn class OverdueError(Exception): def __init__(self, message: str, date: str = ""): super().__init__(generate_message(message, date)) class OverdueWarning(Warning): def __init__(self, message: str, date: str = ""): self.output = generate_message(message, date) def __str__(self): return repr(self.output)
30.428571
57
0.699531
50
426
5.56
0.42
0.215827
0.079137
0.129496
0.201439
0.201439
0.201439
0
0
0
0
0
0.190141
426
14
58
30.428571
0.805797
0
0
0.2
1
0
0
0
0
0
0
0
0
1
0.3
false
0
0.2
0.1
0.8
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
7282e9fbcd024d6aae78f71206db5f11719ea2df
218
py
Python
stuart/models/abtract_model.py
GajovskiMaxime/stuart
361a92be3d380e9dfe8db12c7c3327061cc4818a
[ "BSD-3-Clause" ]
null
null
null
stuart/models/abtract_model.py
GajovskiMaxime/stuart
361a92be3d380e9dfe8db12c7c3327061cc4818a
[ "BSD-3-Clause" ]
9
2017-08-08T15:23:40.000Z
2017-08-17T15:05:42.000Z
stuart/models/abtract_model.py
GajovskiMaxime/stuart
361a92be3d380e9dfe8db12c7c3327061cc4818a
[ "BSD-3-Clause" ]
null
null
null
from stuart.database.crud_mixin import CRUDMixin from stuart.extensions import db class AbstractModel(CRUDMixin, db.Model): """Base model class that includes CRUD convenience methods.""" __abstract__ = True
24.222222
66
0.775229
27
218
6.074074
0.703704
0.121951
0
0
0
0
0
0
0
0
0
0
0.151376
218
8
67
27.25
0.886486
0.256881
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
728eeeadbe9b301e4eba6365bd3b12859b3f2704
109
py
Python
nestedSerializers/apps.py
RaghuDalal/Learning_Django_REST_Framework
11fcca359fbed00aecbbdade11f30eb4557edbe6
[ "MIT" ]
null
null
null
nestedSerializers/apps.py
RaghuDalal/Learning_Django_REST_Framework
11fcca359fbed00aecbbdade11f30eb4557edbe6
[ "MIT" ]
null
null
null
nestedSerializers/apps.py
RaghuDalal/Learning_Django_REST_Framework
11fcca359fbed00aecbbdade11f30eb4557edbe6
[ "MIT" ]
null
null
null
from django.apps import AppConfig class NestedserializersConfig(AppConfig): name = 'nestedSerializers'
18.166667
41
0.798165
10
109
8.7
0.9
0
0
0
0
0
0
0
0
0
0
0
0.137615
109
5
42
21.8
0.925532
0
0
0
0
0
0.155963
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
72ab75b81949b531f66cf693ec795806dcaf1622
1,424
py
Python
PyOpenGL-3.0.2/OpenGL/GL/ARB/blend_func_extended.py
frederica07/Dragon_Programming_Process
c0dff2e20c1be6db5adc6f9977efae8f7f888ef5
[ "BSD-2-Clause" ]
null
null
null
PyOpenGL-3.0.2/OpenGL/GL/ARB/blend_func_extended.py
frederica07/Dragon_Programming_Process
c0dff2e20c1be6db5adc6f9977efae8f7f888ef5
[ "BSD-2-Clause" ]
null
null
null
PyOpenGL-3.0.2/OpenGL/GL/ARB/blend_func_extended.py
frederica07/Dragon_Programming_Process
c0dff2e20c1be6db5adc6f9977efae8f7f888ef5
[ "BSD-2-Clause" ]
null
null
null
'''OpenGL extension ARB.blend_func_extended This module customises the behaviour of the OpenGL.raw.GL.ARB.blend_func_extended to provide a more Python-friendly API Overview (from the spec) Traditional OpenGL includes fixed-function blending that combines source colors with the existing content of a render buffer in a variety of ways. A number of extensions have enhanced this functionality by adding further sources of blending weights and methods to combine them. However, the inputs to the fixed-function blending units are constrained to a source color (as output from fragment shading), destination color (as the current content of the frame buffer) or constants that may be used in their place. This extension adds new blending functions whereby a fragment shader may output two colors, one of which is treated as the source color, and the other used as a blending factor for either source or destination colors. Furthermore, this extension increases orthogonality by allowing the SRC_ALPHA_SATURATE function to be used as the destination weight. The official definition of this extension is available here: http://www.opengl.org/registry/specs/ARB/blend_func_extended.txt ''' from OpenGL import platform, constants, constant, arrays from OpenGL import extensions, wrapper from OpenGL.GL import glget import ctypes from OpenGL.raw.GL.ARB.blend_func_extended import * ### END AUTOGENERATED SECTION
45.935484
77
0.813202
220
1,424
5.218182
0.531818
0.027875
0.041812
0.069686
0.054007
0.054007
0.054007
0
0
0
0
0
0.148876
1,424
31
78
45.935484
0.947195
0.928371
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
72aba4e949876455567612d6b5712b51084fca33
332
py
Python
Wasteland/api/encode_decode_save.py
Tonysun1/Explore-Z-Wasteland
77be93f8b7838cd0b6b42e03607ba5662fc25b4c
[ "MIT" ]
1
2018-10-24T03:48:12.000Z
2018-10-24T03:48:12.000Z
Wasteland/api/encode_decode_save.py
Tonysun-rpi/Explore-Z-Wasteland
77be93f8b7838cd0b6b42e03607ba5662fc25b4c
[ "MIT" ]
null
null
null
Wasteland/api/encode_decode_save.py
Tonysun-rpi/Explore-Z-Wasteland
77be93f8b7838cd0b6b42e03607ba5662fc25b4c
[ "MIT" ]
1
2018-10-22T19:14:38.000Z
2018-10-22T19:14:38.000Z
import api.save_and_load # NOTE: all files should be saved in the doc folder # add all save information into a string and call the save function # TODO: fill parameters def encode(): pass # call load function then # parse string and extract information from it # # num_save: the index of save file def decode(num_save): pass
18.444444
67
0.756024
56
332
4.410714
0.678571
0.072874
0
0
0
0
0
0
0
0
0
0
0.192771
332
17
68
19.529412
0.921642
0.71988
0
0.4
0
0
0
0
0
0
0
0.058824
0
1
0.4
false
0.4
0.2
0
0.6
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
0
1
0
0
1
0
0
4