hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
690c10eaee2b7b48bf3f1f8a1b83543fde67c56c
1,156
py
Python
Ago-Dic-2020/perez-sanchez-jose-jahir/Practica5/strategy.py
bryanbalderas/DAS_Sistemas
1e31f088c0de7134471025a5730b0abfc19d936e
[ "MIT" ]
41
2017-09-26T09:36:32.000Z
2022-03-19T18:05:25.000Z
Ago-Dic-2020/perez-sanchez-jose-jahir/Practica5/strategy.py
bryanbalderas/DAS_Sistemas
1e31f088c0de7134471025a5730b0abfc19d936e
[ "MIT" ]
67
2017-09-11T05:06:12.000Z
2022-02-14T04:44:04.000Z
Ago-Dic-2020/perez-sanchez-jose-jahir/Practica5/strategy.py
bryanbalderas/DAS_Sistemas
1e31f088c0de7134471025a5730b0abfc19d936e
[ "MIT" ]
210
2017-09-01T00:10:08.000Z
2022-03-19T18:05:12.000Z
import abc class InterfaceStrategy(metaclass = abc.ABCMeta): @abc.abstractmethod def authenticate(self): pass class BasicAuthConcreteStrategy(InterfaceStrategy): def __init__(self, usr, passwd): self.user = usr self.password = passwd def authenticate(self): return f'### Authenticated with Basic Auth\n\tUser: {self.user}\n\tPass: {self.password}' class OauthAuthConcreteStrategy(InterfaceStrategy): credefault = { "access_token": "una cadena muy larga", "token_type": "Bearer", "expires_in": 3600, "refresh_token": "una cadena muy larga 2", "scope": "readAndWrite" } def __init__(self, **kargs): self.credentials = kargs.get("credentials", self.credefault) def authenticate(self): pass class ApiKeyConcreteStrategy(InterfaceStrategy): def __init__(self, apoi_key): self.api_key = api_key def authenticate(self): return f'### Authenticated with API Key\n\tKey: {self.api}' class AunthContext(): pass def main(): pass if __name__ == "__main__": main()
24.083333
97
0.630623
120
1,156
5.85
0.458333
0.08547
0.108262
0.065527
0.264957
0.122507
0.122507
0
0
0
0
0.005828
0.257785
1,156
47
98
24.595745
0.812354
0
0
0.235294
0
0.029412
0.222318
0
0
0
0
0
0
1
0.235294
false
0.205882
0.029412
0.058824
0.5
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
2
690f34a1967481acd69cd45cacf6dc0240b195d2
2,569
py
Python
website/auth.py
njokuifeanyigerald/flask_auth
e077aee007f4894f2303f8d06ce3d77c21d82b95
[ "MIT" ]
null
null
null
website/auth.py
njokuifeanyigerald/flask_auth
e077aee007f4894f2303f8d06ce3d77c21d82b95
[ "MIT" ]
null
null
null
website/auth.py
njokuifeanyigerald/flask_auth
e077aee007f4894f2303f8d06ce3d77c21d82b95
[ "MIT" ]
null
null
null
from flask import Blueprint, render_template,request, flash, redirect from flask.helpers import url_for from .models import User, Note from werkzeug.security import generate_password_hash, check_password_hash from . import db from flask_login import login_user, logout_user, login_required, current_user # blueprint helps separte views auth = Blueprint('auth', __name__) @auth.route('/login', methods=['GET', 'POST']) def login(): if request.method == 'POST': email = request.form.get('email') password = request.form.get('password') user = User.query.filter_by(email=email).first() if user: # check the password hash if check_password_hash(user.password, password): flash('logged in successfully', category='success') login_user(user, remember=True) return redirect(url_for('views.home')) else: flash('incorrect password!', category='error') else: flash('email does not exist', category='error') if current_user.is_authenticated: return redirect (url_for('views.home')) return render_template('login.html', user=current_user) @auth.route('/register', methods=['GET', 'POST']) def register(): if request.method == 'POST': email = request.form.get('email') name = request.form.get('name') password = request.form.get('password') password2 = request.form.get('password2') user = User.query.filter_by(email=email).first() if user: flash('user already exist!', category='error') if len(email) < 4: flash('email must above 3 chracters', category='error') elif len(name) < 2: flash('name must be greater than a character', category='error') elif password != password2: flash('passwords must match', category='error') elif len(password) < 8: flash('password should be above 7 characters', category='error') else: new_user = User(email=email, name=name, password=generate_password_hash(password)) db.session.add(new_user) db.session.commit() flash('account created', category='success') return redirect(url_for('auth.login')) if current_user.is_authenticated: return redirect(url_for('views.home')) return 
render_template('signup.html', user=current_user) @login_required @auth.route('/logout') def logout(): logout_user() return redirect(url_for('auth.login'))
40.140625
94
0.639548
308
2,569
5.207792
0.298701
0.056733
0.052369
0.062344
0.293641
0.256234
0.201995
0.201995
0.201995
0.148379
0
0.00408
0.236668
2,569
64
95
40.140625
0.81387
0.020631
0
0.350877
1
0
0.168656
0
0
0
0
0
0
1
0.052632
false
0.192982
0.105263
0
0.280702
0.035088
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2
691039a120e11478c17132915613ceb985f3a65b
436
py
Python
update_user_progress.py
sadakatsu/kgs-rip
d0db8492dd6fbb97e2b3a5923258eaf09e333b9c
[ "MIT" ]
1
2021-03-24T14:33:10.000Z
2021-03-24T14:33:10.000Z
update_user_progress.py
sadakatsu/kgs-rip
d0db8492dd6fbb97e2b3a5923258eaf09e333b9c
[ "MIT" ]
null
null
null
update_user_progress.py
sadakatsu/kgs-rip
d0db8492dd6fbb97e2b3a5923258eaf09e333b9c
[ "MIT" ]
null
null
null
from sqlite3 import Connection def update_user_progress(connection: Connection, user: str, last_year: int, last_month): try: with connection: cursor = connection.cursor() cursor.execute('update users set last_year = ?, last_month = ? where id = ?', [last_year, last_month, user]) cursor.close() print(f'Finished {user} {last_year}-{last_month}.') finally: pass
31.142857
120
0.626147
51
436
5.156863
0.529412
0.121673
0.136882
0.193916
0
0
0
0
0
0
0
0.003115
0.263761
436
13
121
33.538462
0.816199
0
0
0
0
0
0.229358
0.057339
0
0
0
0
0
1
0.1
false
0.1
0.1
0
0.2
0.1
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2
6913850d05da9394c14a282f9d776916803d0bba
1,035
py
Python
src/d_model.py
rghosh8/oh-my-dog-
579c0765bf085e6e1d8e7aa64b7842d916aea505
[ "MIT" ]
null
null
null
src/d_model.py
rghosh8/oh-my-dog-
579c0765bf085e6e1d8e7aa64b7842d916aea505
[ "MIT" ]
null
null
null
src/d_model.py
rghosh8/oh-my-dog-
579c0765bf085e6e1d8e7aa64b7842d916aea505
[ "MIT" ]
null
null
null
import tensorflow as tf from static import * from tensorflow.keras import layers def d_model(): model = tf.keras.Sequential() model.add(layers.Conv2D(64, (4,4), strides=(2,2), padding='same', input_shape=[image_width,image_height,3],\ kernel_initializer=weight_init)) model.add(layers.BatchNormalization()) model.add(layers.LeakyReLU()) print(model.output_shape) model.add(layers.Conv2D(128, (4,4), strides=(2,2), padding='same',\ kernel_initializer=weight_init)) model.add(layers.BatchNormalization()) model.add(layers.LeakyReLU()) print(model.output_shape) model.add(layers.Conv2D(256, (4,4), strides=(2,2), padding='same',\ kernel_initializer=weight_init)) model.add(layers.BatchNormalization()) model.add(layers.LeakyReLU()) print(model.output_shape) model.add(layers.Flatten()) model.add(layers.Dense(1, activation='sigmoid')) print(model.output_shape) return model
33.387097
112
0.654106
127
1,035
5.220472
0.330709
0.13273
0.232278
0.126697
0.647059
0.647059
0.647059
0.613876
0.613876
0.613876
0
0.030377
0.204831
1,035
30
113
34.5
0.775213
0
0
0.541667
0
0
0.018375
0
0
0
0
0
0
1
0.041667
false
0
0.125
0
0.208333
0.166667
0
0
0
null
0
1
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
692b27670eedfbec970e72ce21d02af4320c2178
715
py
Python
pyansys/__init__.py
guyms/pyansys
7a9182a7c44098d9b99a0d1eb2fd183b7256ac01
[ "MIT" ]
1
2019-04-16T14:49:28.000Z
2019-04-16T14:49:28.000Z
pyansys/__init__.py
AdrlVmadriel/pyansys
3f97f970546f7cdf1f301f683c9a6bdcd92357c2
[ "MIT" ]
null
null
null
pyansys/__init__.py
AdrlVmadriel/pyansys
3f97f970546f7cdf1f301f683c9a6bdcd92357c2
[ "MIT" ]
null
null
null
import warnings from pyansys._version import __version__ from pyansys.archive import Archive, write_cmblock, write_nblock, save_as_archive from pyansys.binary_reader import * from pyansys.cyclic_reader import * from pyansys.binary_reader import FullReader from pyansys.cellquality import * from pyansys.convert import convert_script try: from pyansys.ansys import ANSYS from pyansys.ansys import change_default_ansys_path except Exception as e: try: from pyansys.ansysbase import ANSYS except: warnings.warn('Unable to load interactive ANSYS APDL module:\n\n%s' % str(e)) try: from pyansys import ansys has_ansys = ansys.check_valid_ansys() except: has_ansys = False
27.5
85
0.776224
99
715
5.40404
0.414141
0.226168
0.095327
0.085981
0.108411
0
0
0
0
0
0
0
0.169231
715
25
86
28.6
0.900673
0
0
0.238095
0
0
0.071329
0
0
0
0
0
0
1
0
false
0
0.571429
0
0.571429
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
69324dadcbe1f7ff86d3b27ce7f6affcdd7ea717
655
py
Python
tests/kyu_7_tests/test_sum_squares_of_numbers_in_list_that_may_contain_more_lists.py
the-zebulan/CodeWars
1eafd1247d60955a5dfb63e4882e8ce86019f43a
[ "MIT" ]
40
2016-03-09T12:26:20.000Z
2022-03-23T08:44:51.000Z
tests/kyu_7_tests/test_sum_squares_of_numbers_in_list_that_may_contain_more_lists.py
akalynych/CodeWars
1eafd1247d60955a5dfb63e4882e8ce86019f43a
[ "MIT" ]
null
null
null
tests/kyu_7_tests/test_sum_squares_of_numbers_in_list_that_may_contain_more_lists.py
akalynych/CodeWars
1eafd1247d60955a5dfb63e4882e8ce86019f43a
[ "MIT" ]
36
2016-11-07T19:59:58.000Z
2022-03-31T11:18:27.000Z
import unittest from katas.kyu_7.sum_squares_of_numbers_in_list_that_may_contain_more_lists \ import SumSquares class SumSquaresTestCase(unittest.TestCase): def test_equal_1(self): self.assertEqual(SumSquares([1, 2, 3]), 14) def test_equal_2(self): self.assertEqual(SumSquares([[1, 2], 3]), 14) def test_equal_3(self): self.assertEqual(SumSquares([[[[[[[[[1]]]]]]]]]), 1) def test_equal_4(self): self.assertEqual(SumSquares([10, [[10], 10], [10]]), 400) def test_equal_5(self): self.assertEqual(SumSquares( [1, [[3], 10, 5, [2, [3], [4], [5, [6]]]], [10]] ), 325)
27.291667
77
0.60916
89
655
4.247191
0.404494
0.092593
0.15873
0.383598
0.402116
0.243386
0.243386
0.243386
0.243386
0.243386
0
0.085106
0.210687
655
23
78
28.478261
0.646035
0
0
0
0
0
0
0
0
0
0
0
0.3125
1
0.3125
false
0
0.125
0
0.5
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
2
6935d1781fa987e73cc6a41212766edf28c55c0b
2,091
py
Python
tests/test_graphql_grapple.py
schrockn/graphscale
73bf0f588332b3bfb39261a4db3fe23a7a27a496
[ "MIT" ]
68
2017-07-10T19:08:25.000Z
2021-06-12T23:27:42.000Z
tests/test_graphql_grapple.py
schrockn/graphscale
73bf0f588332b3bfb39261a4db3fe23a7a27a496
[ "MIT" ]
19
2017-07-02T22:40:26.000Z
2017-07-04T19:58:37.000Z
tests/test_graphql_grapple.py
schrockn/graphscale
73bf0f588332b3bfb39261a4db3fe23a7a27a496
[ "MIT" ]
3
2017-11-22T09:41:52.000Z
2019-03-09T22:44:28.000Z
from typing import Any from graphscale.grapple.graphql_printer import print_graphql_defs from graphscale.grapple.parser import parse_grapple def assert_graphql_def(snapshot: Any, graphql: str) -> None: result = print_graphql_defs(parse_grapple(graphql)) snapshot.assert_match(result) def test_basic_type(snapshot: Any) -> None: assert_graphql_def(snapshot, """type Test { name: String }""") def test_non_pythonic_name(snapshot: Any) -> None: assert_graphql_def(snapshot, """type Test { longName: String }""") def test_nonnullable_type(snapshot: Any) -> None: assert_graphql_def(snapshot, """type Test { name: String! }""") def test_list_type(snapshot: Any) -> None: assert_graphql_def(snapshot, """type Test { names: [String] }""") def test_list_of_reqs(snapshot: Any) -> None: assert_graphql_def(snapshot, """type Test { names: [String!] }""") def test_req_list(snapshot: Any) -> None: assert_graphql_def(snapshot, """type Test { names: [String]! }""") def test_req_list_of_reqs(snapshot: Any) -> None: assert_graphql_def(snapshot, """type Test { names: [String!]! }""") def test_double_list(snapshot: Any) -> None: assert_graphql_def(snapshot, """type Test { matrix: [[String]] }""") def test_ref_to_self(snapshot: Any) -> None: assert_graphql_def(snapshot, """type Test { other: Test }""") def test_args(snapshot: Any) -> None: assert_graphql_def(snapshot, """type Test { relatives(skip: Int, take: Int) : [Test] }""") def test_args_defaults(snapshot: Any) -> None: assert_graphql_def( snapshot, """type Test { many_args( defaultTen: Int = 10, defaultTwenty: Int = 20, defaultZero: Int = 0, strArg: String = "foo", defaultTrue: Boolean = true, defaultFalse: Boolean = false, ) : [Test] }""" ) def test_enum(snapshot: Any) -> None: assert_graphql_def( snapshot, """ type Hospital { status: HospitalStatus reqStatus: HospitalStatus! } enum HospitalStatus { AS_SUBMITTED } """ )
26.1375
94
0.66045
252
2,091
5.230159
0.261905
0.128225
0.157815
0.236722
0.540212
0.540212
0.540212
0.540212
0.507587
0.364947
0
0.002996
0.201817
2,091
79
95
26.468354
0.786699
0
0
0.039216
0
0
0.340985
0
0
0
0
0
0.27451
1
0.254902
false
0
0.058824
0
0.313725
0.039216
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
2
694841ad21794bc3b949bf93528580956b704d0f
50
py
Python
venv/lib/python3.8/site-packages/PIL/_version.py
Joshua-Barawa/My-Photos
adcaea48149c6b31e9559b045709d538d0b749bc
[ "PostgreSQL", "Unlicense" ]
17
2022-03-23T18:30:33.000Z
2022-03-31T19:59:27.000Z
venv/lib/python3.8/site-packages/PIL/_version.py
Joshua-Barawa/My-Photos
adcaea48149c6b31e9559b045709d538d0b749bc
[ "PostgreSQL", "Unlicense" ]
5
2022-02-13T14:38:04.000Z
2022-02-15T00:13:07.000Z
venv/lib/python3.8/site-packages/PIL/_version.py
Joshua-Barawa/My-Photos
adcaea48149c6b31e9559b045709d538d0b749bc
[ "PostgreSQL", "Unlicense" ]
4
2022-03-24T00:52:16.000Z
2022-03-28T18:09:08.000Z
# Master version for Pillow __version__ = "9.0.1"
16.666667
27
0.72
8
50
4
0.875
0
0
0
0
0
0
0
0
0
0
0.071429
0.16
50
2
28
25
0.690476
0.5
0
0
0
0
0.217391
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
694ecf4dac6761dd754621b90998f8aad84da18b
165
py
Python
django_validajax/__init__.py
professorplumb/django-validajax
8bf806c850adfc707b9b50b8e7095071602c0b90
[ "MIT" ]
null
null
null
django_validajax/__init__.py
professorplumb/django-validajax
8bf806c850adfc707b9b50b8e7095071602c0b90
[ "MIT" ]
null
null
null
django_validajax/__init__.py
professorplumb/django-validajax
8bf806c850adfc707b9b50b8e7095071602c0b90
[ "MIT" ]
null
null
null
from .registration import FormRegistry __version__ = '0.0.1' default_app_config = 'django_validajax.apps.DjangoValidAJAXAppConfig' form_registry = FormRegistry()
20.625
69
0.818182
18
165
7.055556
0.888889
0
0
0
0
0
0
0
0
0
0
0.020134
0.09697
165
7
70
23.571429
0.832215
0
0
0
0
0
0.309091
0.278788
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
695f87e635d9e32e6ce17ed85ec0cbecb54f8ab0
4,287
py
Python
examples/python/stochastic_swap.py
biplab37/qiskit-aakash
e10b204887606f1f75bdfde182bb0c6d0a322c68
[ "Apache-2.0" ]
22
2019-08-15T04:39:15.000Z
2022-03-06T05:17:04.000Z
examples/python/stochastic_swap.py
biplab37/qiskit-aakash
e10b204887606f1f75bdfde182bb0c6d0a322c68
[ "Apache-2.0" ]
2
2020-10-26T07:12:12.000Z
2021-12-09T16:22:51.000Z
examples/python/stochastic_swap.py
biplab37/qiskit-aakash
e10b204887606f1f75bdfde182bb0c6d0a322c68
[ "Apache-2.0" ]
9
2019-09-05T05:33:00.000Z
2021-10-09T16:04:53.000Z
# -*- coding: utf-8 -*- # This code is part of Qiskit. # # (C) Copyright IBM 2017, 2019. # # This code is licensed under the Apache License, Version 2.0. You may # obtain a copy of this license in the LICENSE.txt file in the root directory # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. # # Any modifications or derivative works of this code must retain this # copyright notice, and modified files need to carry a notice indicating # that they have been altered from the originals. """Example of using the StochasticSwap pass.""" from qiskit.transpiler.passes import StochasticSwap from qiskit.transpiler import CouplingMap, Layout from qiskit.converters import circuit_to_dag, dag_to_circuit from qiskit import QuantumRegister, ClassicalRegister, QuantumCircuit coupling = CouplingMap([[0, 1], [1, 2], [1, 3]]) qr = QuantumRegister(2, 'q') ar = QuantumRegister(2, 'a') cr = ClassicalRegister(4, 'c') circ = QuantumCircuit(qr, ar, cr) circ.cx(qr[1], ar[0]) circ.cx(qr[0], ar[1]) circ.measure(qr[0], cr[0]) circ.h(qr) circ.h(ar) circ.cx(qr[0], qr[1]) circ.cx(ar[0], ar[1]) circ.measure(qr[0], cr[0]) circ.measure(qr[1], cr[1]) circ.measure(ar[0], cr[2]) circ.measure(ar[1], cr[3]) dag = circuit_to_dag(circ) # ┌─┐┌───┐ ┌─┐ # q_0: |0>─────────────────■──────────────────┤M├┤ H ├──■─────┤M├ # ┌───┐ │ └╥┘└───┘┌─┴─┐┌─┐└╥┘ # q_1: |0>──■───────┤ H ├──┼───────────────────╫──────┤ X ├┤M├─╫─ # ┌─┴─┐┌───┐└───┘ │ ┌─┐ ║ └───┘└╥┘ ║ # a_0: |0>┤ X ├┤ H ├───────┼─────────■─────┤M├─╫────────────╫──╫─ # └───┘└───┘ ┌─┴─┐┌───┐┌─┴─┐┌─┐└╥┘ ║ ║ ║ # a_1: |0>───────────────┤ X ├┤ H ├┤ X ├┤M├─╫──╫────────────╫──╫─ # └───┘└───┘└───┘└╥┘ ║ ║ ║ ║ # c_0: 0 ═══════════════════════════════╬══╬══╩════════════╬══╩═ # ║ ║ ║ # c_1: 0 ═══════════════════════════════╬══╬═══════════════╩════ # ║ ║ # c_2: 0 ═══════════════════════════════╬══╩════════════════════ # ║ # c_3: 0 ═══════════════════════════════╩═══════════════════════ # # ┌─┐┌───┐ ┌─┐ # q_0: |0>────────────────────■──┤M├┤ H 
├──────────────────■──┤M├────── # ┌─┴─┐└╥┘└───┘┌───┐┌───┐ ┌─┴─┐└╥┘┌─┐ # q_1: |0>──■───X───────────┤ X ├─╫──────┤ H ├┤ X ├─X────┤ X ├─╫─┤M├─── # ┌─┴─┐ │ ┌───┐└───┘ ║ └───┘└─┬─┘ │ └───┘ ║ └╥┘┌─┐ # a_0: |0>┤ X ├─┼──────┤ H ├──────╫─────────────■───┼──────────╫──╫─┤M├ # └───┘ │ ┌───┐└───┘ ║ │ ┌─┐ ║ ║ └╥┘ # a_1: |0>──────X─┤ H ├───────────╫─────────────────X─┤M├──────╫──╫──╫─ # └───┘ ║ └╥┘ ║ ║ ║ # c_0: 0 ════════════════════════╩════════════════════╬═══════╩══╬══╬═ # ║ ║ ║ # c_1: 0 ═════════════════════════════════════════════╬══════════╩══╬═ # ║ ║ # c_2: 0 ═════════════════════════════════════════════╬═════════════╩═ # ║ # c_3: 0 ═════════════════════════════════════════════╩═══════════════ # # Layout from mapper: # {qr[0]: 0, # qr[1]: 1, # ar[0]: 2, # ar[1]: 3} # # 2 # | # 0 - 1 - 3 # Build the expected output to verify the pass worked expected = QuantumCircuit(qr, ar, cr) expected.cx(qr[1], ar[0]) expected.swap(qr[0], qr[1]) expected.cx(qr[1], ar[1]) expected.h(ar[1]) expected.h(ar[0]) expected.measure(qr[1], cr[0]) expected.h(qr[0]) expected.swap(qr[1], ar[1]) expected.h(ar[1]) expected.cx(ar[0], qr[1]) expected.measure(ar[0], cr[2]) expected.swap(qr[1], ar[1]) expected.measure(ar[1], cr[3]) expected.cx(qr[1], qr[0]) expected.measure(qr[1], cr[0]) expected.measure(qr[0], cr[1]) expected_dag = circuit_to_dag(expected) layout = Layout({qr[0]: 0, qr[1]: 1, ar[0]: 2, ar[1]: 3}) # Run the pass on the dag from the input circuit pass_ = StochasticSwap(coupling, layout, 20, 13) after = pass_.run(dag) # Verify the output of the pass matches our expectation assert expected_dag == after
39.694444
77
0.369489
569
4,287
4.706503
0.28471
0.015683
0.006721
0.005975
0.146751
0.095594
0.086632
0.072442
0.050037
0.03062
0
0.039528
0.268253
4,287
107
78
40.065421
0.456487
0.660369
0
0.186047
0
0
0.002154
0
0
0
0
0
0.023256
1
0
false
0.069767
0.093023
0
0.093023
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2
6962ca3fda862acfdf63401897967239f46c0b98
1,502
py
Python
2016/day19.py
dopplershift/advent-of-code
dfb722c8600e902b65cb9f10a16c11a2ab40db8c
[ "MIT" ]
3
2020-12-10T02:45:51.000Z
2020-12-25T23:16:45.000Z
2016/day19.py
dopplershift/advent-of-code
dfb722c8600e902b65cb9f10a16c11a2ab40db8c
[ "MIT" ]
null
null
null
2016/day19.py
dopplershift/advent-of-code
dfb722c8600e902b65cb9f10a16c11a2ab40db8c
[ "MIT" ]
2
2020-12-02T03:37:04.000Z
2021-12-04T18:45:44.000Z
def collapse(n): elves = list(range(1, n + 1)) while len(elves) > 1: odd = len(elves) % 2 elves = elves[::2] if odd: elves.pop(0) return elves[0] def calc(n): p = 1 while p < n: p *= 2 return 2 * (n % (p // 2)) + 1 def collapse_across(n): ind = 0 elves = list(range(1, n + 1)) while len(elves) > 1: stolen = (ind + len(elves) // 2) % len(elves) elves.pop(stolen) ind += (1 - (stolen < ind)) if ind >= len(elves): ind = 0 return elves[0] def calc_across(n): p = 1 while p < n: p *= 3 m = p // 3 return (n // (2 * m) + 2 * (n // (3 * m))) * m + (n // m) * (n % m) if __name__ == '__main__': from aocd.models import Puzzle puz = Puzzle(2016, 19) assert collapse(5) == 3 assert collapse_across(2) == 1 assert collapse_across(3) == 3 assert collapse_across(4) == 1 assert collapse_across(5) == 2 assert collapse_across(6) == 3 assert collapse_across(7) == 5 assert collapse_across(8) == 7 assert collapse_across(9) == 9 assert collapse_across(10) == 1 for i in range(2, 1000): if collapse(i) != calc(i): print('next:', i) if collapse_across(i) != calc_across(i): print('across:', i) puz.answer_a = calc(int(puz.input_data)) print('Part 1:', puz.answer_a) puz.answer_b = calc_across(int(puz.input_data)) print('Part 2:', puz.answer_b)
23.107692
71
0.521305
224
1,502
3.370536
0.236607
0.203974
0.238411
0.083444
0.227815
0.227815
0.111258
0.082119
0.082119
0.082119
0
0.058881
0.321571
1,502
64
72
23.46875
0.682041
0
0
0.235294
0
0
0.022636
0
0
0
0
0
0.196078
1
0.078431
false
0
0.019608
0
0.176471
0.078431
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
15ca65767378ee9cdd236511fa09ad8e5f761668
657
py
Python
bot/exts/pride/drag_queen_name.py
imranshaji7/sir-lancebot
67caea1f737f86c2394cc9a280acbfe71588c917
[ "MIT" ]
1
2021-08-31T12:52:15.000Z
2021-08-31T12:52:15.000Z
bot/exts/pride/drag_queen_name.py
imranshaji7/sir-lancebot
67caea1f737f86c2394cc9a280acbfe71588c917
[ "MIT" ]
null
null
null
bot/exts/pride/drag_queen_name.py
imranshaji7/sir-lancebot
67caea1f737f86c2394cc9a280acbfe71588c917
[ "MIT" ]
1
2021-09-05T10:35:57.000Z
2021-09-05T10:35:57.000Z
import json import logging import random from pathlib import Path from discord.ext import commands from bot.bot import Bot log = logging.getLogger(__name__) NAMES = json.loads(Path("bot/resources/pride/drag_queen_names.json").read_text("utf8")) class DragNames(commands.Cog): """Gives a random drag queen name!""" @commands.command(name="dragname", aliases=("dragqueenname", "queenme")) async def dragname(self, ctx: commands.Context) -> None: """Sends a message with a drag queen name.""" await ctx.send(random.choice(NAMES)) def setup(bot: Bot) -> None: """Load the Drag Names Cog.""" bot.add_cog(DragNames())
24.333333
87
0.700152
91
657
4.967033
0.538462
0.059735
0.057522
0
0
0
0
0
0
0
0
0.001821
0.164384
657
26
88
25.269231
0.821494
0.085236
0
0
0
0
0.133945
0.075229
0
0
0
0
0
1
0.071429
false
0
0.428571
0
0.571429
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
15e1383f9312e592e2b750f41de6e6a08e6b87ef
2,601
py
Python
app/test_main.py
nachitog83/nirvana_fastapi
5257cb9b965701f28d4443ff98c806e963054d08
[ "MIT" ]
null
null
null
app/test_main.py
nachitog83/nirvana_fastapi
5257cb9b965701f28d4443ff98c806e963054d08
[ "MIT" ]
null
null
null
app/test_main.py
nachitog83/nirvana_fastapi
5257cb9b965701f28d4443ff98c806e963054d08
[ "MIT" ]
null
null
null
from fastapi.testclient import TestClient import mock import json from .main import app client = TestClient(app) def test_read_main(): response = client.get("/") assert response.status_code == 200 assert response.json() == {"Hello": "Challenge"} class TestMostCommon(): def _side_effect(self, input, member_id): if "https://api1.com" in input: return {"deductible": 1000, "stop_loss": 10000, "oop_max": 5000} elif "https://api2.com" in input: return {"deductible": 1200, "stop_loss": 13000, "oop_max": 6000} elif "https://api3.com" in input: return {"deductible": 1000, "stop_loss": 10000, "oop_max": 6000} def test_calculate_true_values(self): with mock.patch("app.routers.get_api_results") as m: m.side_effect = self._side_effect response = client.get("/insurance/?id=1") print(response.text) assert response.json()['true_deductible'] == 1000 assert response.json()['true_stop_loss'] == 10000 assert response.json()['true_oop_max'] == 6000 class TestAverage(): def _side_effect(self, input, member_id): if "https://api1.com" in input: return {"deductible": 1000, "stop_loss": 10000, "oop_max": 5000} elif "https://api2.com" in input: return {"deductible": 1200, "stop_loss": 13000, "oop_max": 6000} elif "https://api3.com" in input: return {"deductible": 1000, "stop_loss": 10000, "oop_max": 6000} def test_calculate_true_values(self): with mock.patch("app.routers.get_api_results") as m: m.side_effect = self._side_effect response = client.get("/insurance/?id=1&method=average_values") assert response.json()['true_deductible'] == 1066.67 assert response.json()['true_stop_loss'] == 11000 assert response.json()['true_oop_max'] == 5666.67 class TestValidationError(): def _side_effect(self, input, member_id): if "https://api1.com" in input: return {"deductible": 1000, "stop_loss": 10000, "oop_max": 500} elif "https://api2.com" in input: return {"deductible": 1200, "stop_loss": 13000, "oop_max": 6000} elif "https://api3.com" in input: return {"deductible": 1000, "stop_loss": 10000, 
"oop_max": 6000} def test_calculate_true_values(self): with mock.patch("app.routers.get_api_results") as m: m.side_effect = self._side_effect response = client.get("/insurance/?id=2") assert response.status_code == 400
37.157143
76
0.625913
332
2,601
4.704819
0.21988
0.056338
0.057618
0.09219
0.78233
0.741357
0.667093
0.667093
0.667093
0.667093
0
0.082082
0.231834
2,601
69
77
37.695652
0.6997
0
0
0.54717
0
0
0.240677
0.045752
0
0
0
0
0.169811
1
0.132075
false
0
0.075472
0
0.433962
0.018868
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
15e18d88d5456e15197688ae73242a470ee064f4
226
py
Python
4_Control_Flow/1_Decision/C_if_elif_else.py
Oscar-Oliveira/Python3
fa791225a6810b75890d24407b73c5e1b514acbe
[ "MIT" ]
null
null
null
4_Control_Flow/1_Decision/C_if_elif_else.py
Oscar-Oliveira/Python3
fa791225a6810b75890d24407b73c5e1b514acbe
[ "MIT" ]
null
null
null
4_Control_Flow/1_Decision/C_if_elif_else.py
Oscar-Oliveira/Python3
fa791225a6810b75890d24407b73c5e1b514acbe
[ "MIT" ]
null
null
null
""" if elif else """ NUMBER = 17 value = int(input("input a value: ")) if value == NUMBER: print("Winner!!!") elif value > NUMBER: print("your number is greater") else: print("your number is lower")
16.142857
38
0.579646
30
226
4.366667
0.5
0.167939
0.244275
0.259542
0
0
0
0
0
0
0
0.011976
0.261062
226
13
39
17.384615
0.772455
0.053097
0
0
0
0
0.341969
0
0
0
0
0
0
1
0
false
0
0
0
0
0.375
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
15e3e3d36dc25ea21438a88703a3e4523071cb22
117
py
Python
gengive/__init__.py
sthagen/gengive
b3e49d91b5ac612a6131429364e0139ee3f9fed6
[ "MIT" ]
1
2022-01-11T20:55:51.000Z
2022-01-11T20:55:51.000Z
gengive/__init__.py
sthagen/gengive
b3e49d91b5ac612a6131429364e0139ee3f9fed6
[ "MIT" ]
2
2022-01-31T19:15:53.000Z
2022-02-03T22:15:48.000Z
gengive/__init__.py
sthagen/gengive
b3e49d91b5ac612a6131429364e0139ee3f9fed6
[ "MIT" ]
null
null
null
"""Render text (Danish: gengive tekst).""" __version__ = '2022.2.3' __version_info__ = tuple(__version__.split('.'))
29.25
48
0.700855
14
117
4.928571
0.857143
0
0
0
0
0
0
0
0
0
0
0.056604
0.094017
117
3
49
39
0.59434
0.307692
0
0
0
0
0.12
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
15fa8ff91fb281078d363c2c682d367918acb58c
1,324
py
Python
rainradar/config.py
dimitri-rebrikov/rainradar
ce5b1e9db979cb415ae4707ddf54f39e7643548b
[ "MIT" ]
null
null
null
rainradar/config.py
dimitri-rebrikov/rainradar
ce5b1e9db979cb415ae4707ddf54f39e7643548b
[ "MIT" ]
1
2021-10-06T13:28:05.000Z
2021-10-06T13:28:05.000Z
rainradar/config.py
dimitri-rebrikov/rainradar
ce5b1e9db979cb415ae4707ddf54f39e7643548b
[ "MIT" ]
null
null
null
try: import ujson as json except: import json from exception import RainradarException filePath="config.json" class Config: def __init__(self): self.config = { 'ssid':'change_me', 'password':'change_me', 'plz':'change_me' } def readConfig(self): try: fp = open(filePath, 'r') except Exception as e: print(repr(e)) raise RainradarException("ERR CONF FILE") else: with fp: try: self.config=json.load(fp) except Exception as e: print(repr(e)) raise RainradarException("ERR CONF JSON") def writeConfig(self): with open(filePath, 'w') as fp: print("Write config: " + json.dumps(self.config)) return json.dump(self.config, fp) def getSsid(self): return self.config['ssid'] def setSsid(self, ssid): self.config['ssid'] = ssid def getPassword(self): return self.config['password'] def setPassword(self, password): self.config['password'] = password def getPlz(self): return self.config['plz'] def setPlz(self, plz): self.config['plz'] = plz
24.518519
61
0.520393
139
1,324
4.906475
0.330935
0.146628
0.061584
0.087977
0.170088
0.170088
0.170088
0.170088
0.170088
0.170088
0
0
0.370091
1,324
53
62
24.981132
0.817746
0
0
0.166667
0
0
0.094411
0
0
0
0
0
0
1
0.214286
false
0.119048
0.071429
0.071429
0.404762
0.071429
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
2
15fb7777dcbbbccd2d2647f78f66d1560a5e6311
6,656
py
Python
bots/bot_template.py
Cooperw/DAPPin_that_Gas
84205cff13b2311c7c390748939db8c98624e695
[ "MIT" ]
1
2022-02-24T18:27:17.000Z
2022-02-24T18:27:17.000Z
bots/bot_template.py
Cooperw/DAPPin_that_Gas
84205cff13b2311c7c390748939db8c98624e695
[ "MIT" ]
null
null
null
bots/bot_template.py
Cooperw/DAPPin_that_Gas
84205cff13b2311c7c390748939db8c98624e695
[ "MIT" ]
null
null
null
''' a. shiloh ''' ################### # Imports ################### import hashlib import json import time import os from web3 import Web3 ################### # Variables ################### DEBUG = True relays = ['0x69429FB223b3BA3D5823B980E590bF857a680c13'] shepherds = ['0xb43114Bd08B4583b2a7e3ae4603831AB5fa6711f'] tags = [ #'init', #'dog', 'cat', #'fox', #'pelii' ] w3_providers = ['https://rinkeby.infura.io/v3/4ac3bf1caeb14c3e9c241c25ea1d3326'] w3 = None POLL_RATE = 5 #seconds Command_Count = {} Command_Hashes = [] Commands = [] ################### # Helpers ################### def Reload_W3_Connection(): global w3 w3 = Web3(Web3.HTTPProvider(w3_providers[0])) def log(msg, error = False, debug = False): head = '[INFO]' if error: head = '[ERROR]' if debug: head = '[DEBUG]' print(head + ' ' + str(msg)) def Save_Command(cmd_data): if cmd_data[0] not in Command_Hashes: Command_Hashes.append(cmd_data[0]) Commands.append(cmd_data) def Search_Sig(relay, shepherd, tag): return str(hashlib.md5((relay + shepherd + tag).encode('utf-8')).hexdigest()) def print_cmd_info(cmd, relay, shepherd, tag, w3_prov, sid): if DEBUG: print('') print('################################################') print('Relay:\t\t'+str(relay)) print('Shepherd:\t'+str(shepherd)) print('Tag/Channel:\t'+str(tag)) print('Web3:\t\t'+str(w3_prov)) print('Sequence Index:\t'+str(sid)) print('------------------------------------------------') print('OpCode:\t\t'+str(cmd[1].split('~')[0])) print('Param:\t\t'+str(cmd[1].split('~')[1])) print('################################################') print('') ################### # Primary OpCodes ################### def c0_Seconday_OpCodes(cmd): if DEBUG: log('0_Seconday_OpCodes: '+str(cmd), debug=True) pass def c1_Add_Tag(cmd): if DEBUG: log('1_Add_Tag: '+str(cmd), debug=True) pass def c2_Remove_Tag(cmd): if DEBUG: log('2_Remove_Tag: '+str(cmd), debug=True) pass def c3_Add_Shepherd(cmd): if DEBUG: log('3_Add_Shepherd: '+str(cmd), debug=True) pass def 
c4_Remove_Shepherd(cmd): if DEBUG: log('4_Remove_Shepherd: '+str(cmd), debug=True) pass def c5_Add_Relay(cmd): if DEBUG: log('5_Add_Relay: '+str(cmd), debug=True) pass def c6_Remove_Relay(cmd): if DEBUG: log('6_Remove_Relay: '+str(cmd), debug=True) pass def c7_Add_W3(cmd): if DEBUG: log('7_Add_W3: '+str(cmd), debug=True) pass def c8_Remove_W3(cmd): if DEBUG: log('8_Remove_W3: '+str(cmd), debug=True) pass def c9_Poll_Rate(cmd): if DEBUG: log('9_Poll_Rate: '+str(cmd), debug=True) pass def Route_Command(cmd): opcode = cmd[1].split('~')[0] if opcode == '0': c0_Seconday_OpCodes(cmd) if opcode == '1': c1_Add_Tag(cmd) if opcode == '2': c2_Remove_Tag(cmd) if opcode == '3': c3_Add_Shepherd(cmd) if opcode == '4': c4_Remove_Shepherd(cmd) if opcode == '5': c5_Add_Relay(cmd) if opcode == '6': c6_Remove_Relay(cmd) if opcode == '7': c7_Add_W3(cmd) if opcode == '8': c8_Remove_W3(cmd) if opcode == '9': c9_Poll_Rate(cmd) ################### # Web3 ################### relay_abi = [{"inputs":[],"name":"burn","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"bytes32","name":"_posthash","type":"bytes32"}],"name":"get_post","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"_poster","type":"address"},{"internalType":"string","name":"_tag","type":"string"}],"name":"get_postcount_from_address_tag","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"_poster","type":"address"},{"internalType":"string","name":"_tag","type":"string"},{"internalType":"uint256","name":"_id","type":"uint256"}],"name":"get_posthash_from_address_tag_id","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"_poster","type":"address"},{"internalType":"string","name":"_tag","typ
e":"string"}],"name":"get_posthashes_from_address_tag","outputs":[{"internalType":"bytes32[]","name":"","type":"bytes32[]"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"info","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"info_burn","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"info_contract_address","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"string","name":"_message","type":"string"},{"internalType":"string[]","name":"_tags","type":"string[]"}],"name":"push_post","outputs":[],"stateMutability":"nonpayable","type":"function"}] def Get_Relay(address): return Web3.toChecksumAddress(address) def Check_Post_Count(relay_contract, shepherd, tag): post_count = relay_contract.functions.get_postcount_from_address_tag(Web3.toChecksumAddress(shepherd), tag).call() return post_count def Get_Command(relay_contract, shepherd, tag, id): # Lookup hash cmd_hash = relay_contract.functions.get_posthash_from_address_tag_id(Web3.toChecksumAddress(shepherd), tag, int(id)).call() # Resolve hash cmd = relay_contract.functions.get_post(cmd_hash).call() return [cmd_hash, cmd] ################### # Main ################### def SCR_Listener(): Reload_W3_Connection() while True: # Poll messages for each relay -> shepherd -> tag for relay in relays: for shepherd in shepherds: for tag in tags: # Load Contract relay_contract = w3.eth.contract(address=Get_Relay(relay), abi=relay_abi) # Poll Command Count sig = Search_Sig(relay, shepherd, tag) count = Check_Post_Count(relay_contract, shepherd, tag) # Provision new sig counter if necessary try: Command_Count[sig] except: Command_Count[sig] = 0 # Read and Process New Commands while Command_Count[sig] < count: new_index = Command_Count[sig] + 1 cmd_data = 
Get_Command(relay_contract, shepherd, tag, new_index) print_cmd_info(cmd_data, relay, shepherd, tag, w3_providers[0], new_index) Command_Count[sig] = new_index Save_Command(cmd_data) Route_Command(cmd_data) time.sleep(POLL_RATE) ################### # Start ################### SCR_Listener()
29.714286
1,849
0.625601
827
6,656
4.83555
0.192261
0.023756
0.025006
0.032508
0.461365
0.303826
0.243061
0.169042
0.169042
0.169042
0
0.030714
0.124399
6,656
223
1,850
29.847534
0.655456
0.041466
0
0.10687
0
0
0.292098
0.056185
0
0
0.0138
0
0
1
0.152672
false
0.076336
0.038168
0.015267
0.221374
0.114504
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2
c62aac3a07a527d0ca20fb33f42ce152813897f0
912
py
Python
functions.py
ahmadf21-meet/meet2019y1lab7
285426d692030c41adadecc202d708abc7b8f8f9
[ "MIT" ]
null
null
null
functions.py
ahmadf21-meet/meet2019y1lab7
285426d692030c41adadecc202d708abc7b8f8f9
[ "MIT" ]
null
null
null
functions.py
ahmadf21-meet/meet2019y1lab7
285426d692030c41adadecc202d708abc7b8f8f9
[ "MIT" ]
null
null
null
Python 3.6.8 (default, Jan 14 2019, 11:02:34) [GCC 8.0.1 20180414 (experimental) [trunk revision 259383]] on linux Type "help", "copyright", "credits" or "license()" for more information. >>> ====== RESTART: /home/student/ahmadf21_lab7/meet2019y1lab7/functions.py ====== what's your starting number?333 what's your end number?777 12003000 >>> ====== RESTART: /home/student/ahmadf21_lab7/meet2019y1lab7/functions.py ====== what's your starting number?333 what's your end number?777 Traceback (most recent call last): File "/home/student/ahmadf21_lab7/meet2019y1lab7/functions.py", line 12, in <module> answer=add_numbers(start,end) File "/home/student/ahmadf21_lab7/meet2019y1lab7/functions.py", line 9, in add_numbers for number in range(start,end+1): TypeError: must be str, not int >>> ====== RESTART: /home/student/ahmadf21_lab7/meet2019y1lab7/functions.py ====== what's your starting number?
41.454545
88
0.730263
133
912
4.954887
0.518797
0.08346
0.144158
0.174507
0.597876
0.597876
0.597876
0.597876
0.597876
0.427921
0
0.125155
0.115132
912
21
89
43.428571
0.69145
0
0
0.47619
0
0
0.152412
0.120614
0
0
0
0
0
0
null
null
0
0
null
null
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
c62cd1039a75334ef51e8d1bbdcbbe08bec0f99e
110
py
Python
test/test.py
ZhangirB121/pythonAss2
f7dc8b0200d44cd1865dfdf5f92291ad0b1f6af5
[ "MIT" ]
null
null
null
test/test.py
ZhangirB121/pythonAss2
f7dc8b0200d44cd1865dfdf5f92291ad0b1f6af5
[ "MIT" ]
null
null
null
test/test.py
ZhangirB121/pythonAss2
f7dc8b0200d44cd1865dfdf5f92291ad0b1f6af5
[ "MIT" ]
null
null
null
from main import ParseArticle prse=ParseArticle(); n=input('Write currency name: ') prse.collectingInform(n)
18.333333
32
0.781818
14
110
6.142857
0.785714
0
0
0
0
0
0
0
0
0
0
0
0.1
110
6
33
18.333333
0.868687
0
0
0
0
0
0.189189
0
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
c62cf87d21bd901f44a2bfebb49c7ec037099eef
7,024
py
Python
csv_to_tikz.py
Crown0815/ESB-simulations
7ce44a9a43a6bd6a820ae0f674a5ac3fa90c5893
[ "MIT" ]
null
null
null
csv_to_tikz.py
Crown0815/ESB-simulations
7ce44a9a43a6bd6a820ae0f674a5ac3fa90c5893
[ "MIT" ]
null
null
null
csv_to_tikz.py
Crown0815/ESB-simulations
7ce44a9a43a6bd6a820ae0f674a5ac3fa90c5893
[ "MIT" ]
null
null
null
from csv_reader import * from glob import glob import matplotlib.pyplot as plt from scipy.signal import savgol_filter from scipy import optimize from math import * import numpy as np TRM_TIME = 0 TRM_DOWN_NORM = 2 TRM_DOWN_SMOOTH = 3 TRM_UP_NORM = 6 TRM_UP_SMOOTH = 7 path_origami_03_01V_g = '../Paper1_data/Data/Figure_3/OrigamiHybridization/-0.3-0.1V/Functionalization/ch4e3a-20170329-093634.dyn' path_origami_025_015V_g = '../Paper1_data/Data/Figure_3/OrigamiHybridization/-0.25-0.15V/Functionalization/ch4e3a-20170329-103229.dyn' path_origami_02_02V_g = '../Paper1_data/Data/Figure_3/OrigamiHybridization/-0.2-0.2V/Functionalization/ch4e3a-20170329-083358.dyn' path_origami_01_03V_g = '../Paper1_data/Data/Figure_3/OrigamiHybridization/-0.1-0.3V/Functionalization/ch4e3a-20170329-095453.dyn' path_origami_0_04V_g = '../Paper1_data/Data/Figure_3/OrigamiHybridization/0-0.4V/Functionalization/ch4e3a-20170329-105020.dyn' path_4hb_vrr_1 = '../Origami_Paper2/TRM/4HB/ch2e2a-20180808-181034.vrr' path_4hb_vrr_2 = '../Origami_Paper2/TRM/4HB/ch2e1a-20180808-181034.vrr' path_4hb_vrr_3 = '../Origami_Paper2/TRM/4HB/ch1e2g-20180802-115801.vrr' path_4hb_vrr_4 = '../Origami_Paper2/TRM/4HB/ch1e1g-20180802-115801.vrr' path_4hb_vrm = '../Origami_Paper2/TRM/4HB/preparedVrmData.csv' path_4hb_vrm_lukas_1 = '../Origami_Paper2/AdditionalDataLukas/4HB/Id0_AH315_20x_SF_-0.4+0.1V_0.5k.dat' path_4hb_vrm_lukas_2 = '../Origami_Paper2/AdditionalDataLukas/4HB/Id1_AH315_20x_SF_-0.4+0.1V_0.5k.dat' path_4hb_vrm_lukas_3 = '../Origami_Paper2/AdditionalDataLukas/4HB/Id2_AH315_20x_SF_-0.4+0.1V_0.5k.dat' path_4hb_vrm_lukas_4 = '../Origami_Paper2/AdditionalDataLukas/4HB/Id3_AH315_20x_SF_-0.4+0.1V_0.5k.dat' path_4hb_trm_1 = '../Origami_Paper2/TRM/4HB/ch2e1a-20180808-182721.trm' path_4hb_trm_2 = '../Origami_Paper2/TRM/4HB/ch2e2a-20180808-182721.trm' path_4hb_trm_3 = '../Origami_Paper2/TRM/4HB/ch2e1a-20180808-212204.trm' path_4hb_trm_4 = '../Origami_Paper2/TRM/4HB/ch2e2a-20180808-212204.trm' paths_4hb_mg_tit = 
sorted(glob("../Origami_Paper2/Mg_Titration/4HB/*.trm")) path_6hb_vrr_1 = '../Paper1_data/Data/Figure_2/Origami Calibration/Calibration/ch4e3a-20170329-160745.vrr' path_6hb_vrr_2 = '../Paper1_data/Data/Figure_2/Origami Calibration/Calibration/ch4e4a-20170329-160745.vrr' path_6hb_vrr_3 = '../Paper1_data/Data/Figure_2/Origami Calibration/Calibration/ch4e5a-20170329-160745.vrr' path_6hb_vrr_4 = '../Paper1_data/Data/Figure_2/Origami Calibration/Calibration/ch4e6a-20170329-160745.vrr' path_6hb_trm_1 = '../Paper1_data/Data/Figure_2/Origami Switching/ch4e3a-20170329-171550.trm' path_6hb_trm_2 = '../Paper1_data/Data/Figure_2/Origami Switching/ch4e4a-20170329-171550.trm' path_6hb_trm_3 = '../Paper1_data/Data/Figure_2/Origami Switching/ch4e5a-20170329-171550.trm' path_6hb_trm_4 = '../Paper1_data/Data/Figure_2/Origami Switching/ch4e6a-20170329-171550.trm' paths_6hb_mg_tit = sorted(glob("../Origami_Paper2/Mg_Titration/6HB/ch4e3a-*.trm")) path_48bp_trm_1 = '../Origami_Paper2/TRM/48bp/ch1e4g-20180829-170351.trm' path_48bp_trm_2 = '../Origami_Paper2/TRM/48bp/ch1e3g-20180829-170351.trm' path_48bp_trm_3 = '../Origami_Paper2/TRM/48bp/ch1e2g-20180829-170351.trm' path_48bp_trm_4 = '../Origami_Paper2/TRM/48bp/ch1e1g-20180829-170351.trm' path_48bp_vrr_1 = '../Paper1_data/Data/Figure_2/48mer Calibration/124618_Origami Buffer/ch1e3-20170410-124618.vrr' path_48bp_vrr_2 = '../Paper1_data/Data/Figure_2/48mer Calibration/124738_Origami Buffer/ch1e4-20170410-124738.vrr' path_48bp_vrr_3 = '../Paper1_data/Data/Figure_2/48mer Calibration/124908_Origami Buffer/ch1e5-20170410-124908.vrr' path_48bp_vrr_4 = '../Paper1_data/Data/Figure_2/48mer Calibration/125023_Origami Buffer/ch1e6-20170410-125023.vrr' paths_48bp_mg_tit = sorted(glob("../Origami_Paper2/Mg_Titration/48mer/*.trm")) path_96bp_trm_1 = '../Origami_Paper2/TRM/96bp/TRM_old/ch1e3a-20180816-153502.trm' path_96bp_trm_2 = '../Origami_Paper2/TRM/96bp/TRM_old/ch1e4a-20180816-153502.trm' path_96bp_trm_3 = 
'../Origami_Paper2/TRM/96bp/TRM_old/ch1e5a-20180816-153502.trm' path_96bp_trm_4 = '../Origami_Paper2/TRM/96bp/TRM_old/ch1e6a-20180816-153502.trm' path_96bp_vrr_1 = '../Origami_Paper2/TRM/96bp/ch1e3b-20180925-111215.vrr' path_96bp_vrr_2 = '../Origami_Paper2/TRM/96bp/ch1e4b-20180925-111215.vrr' path_96bp_vrr_3 = '../Origami_Paper2/TRM/96bp/ch1e5b-20180925-111215.vrr' path_96bp_vrr_4 = '../Origami_Paper2/TRM/96bp/ch1e6b-20180925-111215.vrr' paths_96bp_mg_tit = sorted(glob("../Origami_Paper2/Mg_Titration/96mer/*.trm")) path_trm_example = '../Paper1_data/Data/Figure_2/48mer_Switching/ch4e3b-20170330-081510.trm' path_staple = '../Paper1_data/Data/Figure_3/StapleHybridization/-02V to 02V 1Hz/Regeneration w 6HB_anchor/Functionalization/ch2e3b-20170327-173741.dyn' path_incubation_vs_live = '../OrigamiUnpublished/IncubationVsLiveImmobilization.csv' path_real_time_binding_dr = '../OrigamiUnpublished/RealTimeBindingInDynamicResponse.csv' path_size_comparison = '../OrigamiUnpublished/SizeComparisons.csv' # vrm_printer = VrmPrinter() # csv_average = CsvAverage() # csv_average.add_source(path_4hb_trm_1) # csv_average.add_source(path_4hb_trm_2) # csv_average.add_source(path_4hb_trm_3) # csv_average.add_source(path_4hb_trm_4) # csv_average.print(0, 3) # x = list(csv_average.values(TRM_TIME, -499.5)) + list(csv_average.values(TRM_TIME)) # y = list(csv_average.values(TRM_DOWN_NORM)) + list(csv_average.values(TRM_UP_NORM)) # y_smooth = savgol_filter(y, 11, 5) # SimpleTikZPrinter.print(x, y_smooth) # csv_average.print(0, 7) reader = SimpleCsv() path = path_size_comparison reader.read(path, ',') fig, (ax1) = plt.subplots(1) for index in range(0, 10, 2): x = list(reader.values(index+0)) y = list(reader.values(index+1)) print('') print('\\addplot [color=mycolor1] table{%') SimpleTikZPrinter.print(x, y) print("};\n") ax1.plot(x, y) # Fit for observable rate of Anti-Her2 # fitfunc = lambda p, var: ((var > p[2]) * p[0] * (1-np.exp(p[1] * (var-p[2])))) + p[3] # Target function # errfunc = 
lambda p, var, fix: fitfunc(p, var) - fix # Distance to the target function # p0 = (350, -0.3, 20, 1650.) # Initial guess for the parameters # p1, success = optimize.leastsq(errfunc, p0[:], args=(x, y)) # y_fit = [fitfunc(p1, x_i) for x_i in x] # ax1.plot(x, y_fit) # print(p1) # # print('\\addplot [color=mycolor1] table{%') # SimpleTikZPrinter.print(x, y_fit) # print("};\n") plt.show() # paths = paths_96bp_mg_tit # fig, (ax1, ax2) = plt.subplots(2) # for index, path in enumerate(paths, start=1): # if index % 2 == 0: # continue # reader.read(path) # color = "mycolor1!"+str(100*index/len(paths))+"!mycolor2" # print('\\addplot [color='+color+'] table{%') # x = list(reader.values(TRM_TIME, -100)) + list(reader.values(TRM_TIME)) # y = list(reader.values(TRM_DOWN_NORM)) + list(reader.values(TRM_UP_NORM)) # # y_smooth = savgol_filter(y, 7, 1) # SimpleTikZPrinter.print(x, y_smooth) # print("};\n") # # ax1.plot(x, y) # ax2.plot(x, y_smooth) # plt.show()
45.316129
151
0.765518
1,109
7,024
4.553652
0.204689
0.074653
0.066535
0.075248
0.551485
0.440792
0.26396
0.175248
0.062178
0.026931
0
0.161215
0.081577
7,024
154
152
45.61039
0.621609
0.226936
0
0
0
0.078947
0.658061
0.642831
0
0
0
0
0
1
0
false
0
0.092105
0
0.092105
0.052632
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
c63c1c5f048b86fd014d787b7736e777202ae6e7
3,925
py
Python
blog/models.py
sighill/blog
7ab126549398daf9dbf996a2ee9f96d7f3b44868
[ "MIT" ]
null
null
null
blog/models.py
sighill/blog
7ab126549398daf9dbf996a2ee9f96d7f3b44868
[ "MIT" ]
null
null
null
blog/models.py
sighill/blog
7ab126549398daf9dbf996a2ee9f96d7f3b44868
[ "MIT" ]
null
null
null
from django.db import models from django.contrib.auth.models import User from django.utils import timezone ##################################################################### class BlogPost(models.Model): ''' Defines the content of the blog posts. ''' uid= models.AutoField(primary_key = True , db_index = True) title= models.CharField(max_length = 255) corpus = models.TextField(blank=True, null=True) short_desc= models.CharField(max_length = 255, blank=True, null=True) img= models.CharField(max_length = 1000) media= models.ManyToManyField('Video', related_name='media_links', blank=True) is_visible= models.BooleanField(default= True) tags= models.ManyToManyField('Tag', related_name='tags', blank=True) created_date = models.DateTimeField(default=timezone.now) modified_date = models.DateTimeField(auto_now=True) # Methods def __str__(self): return str(self.title) ##################################################################### class Video(models.Model): ''' Defines the informations about a video displayed on the site. 
''' # choice vars video_source_choices=( ('youtube', 'youtube'), ('youndcloud', 'soundcloud'), ('vimeo', 'vimeo'), ('other', 'iframe'), ) # attributes uid= models.AutoField(primary_key = True , db_index = True) title= models.CharField(max_length = 255) thumbnail= models.ImageField(upload_to='static/blog/video', blank=True, null=True) start_video_at= models.CharField( max_length = 50, default=0) source_site= models.CharField(max_length = 1000, choices= video_source_choices) source_id_string= models.CharField(max_length = 100) tags= models.ManyToManyField('Tag', related_name='video_tags', blank=True) created_date = models.DateTimeField(default=timezone.now) short_desc= models.CharField(max_length = 255, blank=True, null=True) # Methods def __str__(self): return str(self.title) def colorbox_link(self): if self.source_site== 'soundcloud': complete_link= 'https://soundcloud.com/{}'.format(self.source_id_string) elif self.source_site== 'youtube': complete_link= 'https://www.youtube.com/embed/{}?t={}s'.format(self.source_id_string, self.start_video_at) return complete_link ##################################################################### class Image(models.Model): ''' Images are a large part of this site. This table keeps track of every game image to make their use easy and flexible. 
''' uid= models.AutoField( primary_key = True , db_index = True) title= models.CharField( max_length = 255) thumbnail= models.CharField( max_length = 500, blank=True, null=True) full_img=models.CharField( max_length = 500, blank=True, null=True) external_link= models.CharField( max_length = 500, blank=True, null=True) legend= models.CharField( max_length = 500, blank=True, null=True) # Methods def __str__(self): return str(self.title) ##################################################################### class Tag(models.Model): uid= models.AutoField(primary_key = True , db_index = True) title= models.CharField(max_length = 100) # Methods def __str__(self): return str(self.title) ##################################################################### class Galery(models.Model): uid= models.AutoField(primary_key = True , db_index = True) title= models.CharField(max_length = 100) is_visible= models.BooleanField(default=False) img_content= models.ManyToManyField( 'Image', related_name= 'galery_content', blank= True) legend= models.CharField( max_length = 500, blank=True, null=True) # Methods def __str__(self): return str(self.title)
39.646465
118
0.61758
452
3,925
5.172566
0.276549
0.102652
0.123182
0.164243
0.582549
0.494867
0.461506
0.461506
0.461506
0.392216
0
0.015718
0.189554
3,925
99
119
39.646465
0.719271
0.072102
0
0.408451
0
0
0.064166
0
0
0
0
0
0
1
0.084507
false
0
0.042254
0.070423
0.746479
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
c6454e71aa1ec2be9950bfc97b79bffc397a74b2
283
py
Python
Task 2/task2switch.py
electric-blue-green/YCC-IoT
0dace2862275c50bc0a4f1b73f2bb3924cd62609
[ "Unlicense" ]
2
2018-02-13T20:50:44.000Z
2018-02-24T14:56:20.000Z
Task 2/task2switch.py
aejb/YCC-IoT
0dace2862275c50bc0a4f1b73f2bb3924cd62609
[ "Unlicense" ]
1
2018-02-20T11:26:04.000Z
2018-02-20T11:26:04.000Z
Task 2/task2switch.py
aejb/YCC-IoT
0dace2862275c50bc0a4f1b73f2bb3924cd62609
[ "Unlicense" ]
1
2018-02-20T11:25:36.000Z
2018-02-20T11:25:36.000Z
# Sender from microbit import * import radio radio.on() while True: if button_a.was_pressed(): radio.send("a_on") display.scroll("+", wait=False) elif button_b.was_pressed(): radio.send("a_off") display.scroll("-", wait=False) sleep(20)
18.866667
39
0.611307
38
283
4.394737
0.605263
0.11976
0.179641
0.227545
0.239521
0
0
0
0
0
0
0.009302
0.240283
283
14
40
20.214286
0.767442
0.021201
0
0
0
0
0.04
0
0
0
0
0
0
1
0
true
0
0.181818
0
0.181818
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
2
c659f01509f390c6c10876e087643285df2149c3
1,059
py
Python
vanir/core/account/migrations/0002_auto_20211003_2119.py
guanana/vanir
b0bb9c874795a5803e6437ff0105ea036f1ae7b6
[ "Apache-2.0" ]
1
2022-01-19T07:11:05.000Z
2022-01-19T07:11:05.000Z
vanir/core/account/migrations/0002_auto_20211003_2119.py
guanana/vanir
b0bb9c874795a5803e6437ff0105ea036f1ae7b6
[ "Apache-2.0" ]
10
2021-11-07T14:17:07.000Z
2022-03-30T18:24:48.000Z
vanir/core/account/migrations/0002_auto_20211003_2119.py
guanana/vanir
b0bb9c874795a5803e6437ff0105ea036f1ae7b6
[ "Apache-2.0" ]
null
null
null
# Generated by Django 3.1.13 on 2021-10-03 21:19 from django.db import migrations, models import django.utils.timezone class Migration(migrations.Migration): dependencies = [ ('token', '0002_load_data'), ('account', '0001_initial'), ] operations = [ migrations.AddField( model_name='accounttokens', name='created_on', field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now), preserve_default=False, ), migrations.AddField( model_name='accounttokens', name='updated', field=models.DateTimeField(auto_now=True), ), migrations.AlterUniqueTogether( name='accounttokens', unique_together={('account', 'token')}, ), migrations.RemoveField( model_name='accounttokens', name='blockchain', ), migrations.RemoveField( model_name='accounttokens', name='update_time', ), ]
27.153846
93
0.576015
94
1,059
6.340426
0.553191
0.142617
0.147651
0.174497
0.409396
0.305369
0
0
0
0
0
0.032787
0.308782
1,059
38
94
27.868421
0.781421
0.043437
0
0.40625
1
0
0.151335
0
0
0
0
0
0
1
0
false
0
0.0625
0
0.15625
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
d65db390a4d19cf07af209de73c3e3748b5d0e3a
1,115
py
Python
sovtoken/sovtoken/test/unittests/test_validation.py
dastardlychimp/token-plugin
8755bce1424e794285ea6e70bf9bdd05518667c3
[ "Apache-2.0" ]
null
null
null
sovtoken/sovtoken/test/unittests/test_validation.py
dastardlychimp/token-plugin
8755bce1424e794285ea6e70bf9bdd05518667c3
[ "Apache-2.0" ]
null
null
null
sovtoken/sovtoken/test/unittests/test_validation.py
dastardlychimp/token-plugin
8755bce1424e794285ea6e70bf9bdd05518667c3
[ "Apache-2.0" ]
1
2020-05-27T10:06:42.000Z
2020-05-27T10:06:42.000Z
import pytest from plenum.common.exceptions import InvalidClientMessageException from sovtoken.exceptions import ExtraFundsError from sovtoken.token_req_handler import TokenReqHandler def test_xfer_public_txn_equal(): TokenReqHandler._validate_xfer_public_txn(None, 1, 1) TokenReqHandler._validate_xfer_public_txn(None, 10, 10) TokenReqHandler._validate_xfer_public_txn(None, 100, 100) TokenReqHandler._validate_xfer_public_txn(None, 100000000000000, 100000000000000) TokenReqHandler._validate_xfer_public_txn(None, 9223372036854775807, 9223372036854775807) TokenReqHandler._validate_xfer_public_txn(None, 9223372036854775807000, 9223372036854775807000) def test_xfer_public_txn_inputs_not_greater(): with pytest.raises(ExtraFundsError): TokenReqHandler._validate_xfer_public_txn(None, 2, 1) with pytest.raises(InvalidClientMessageException): TokenReqHandler._validate_xfer_public_txn(None, 1, 2) with pytest.raises(InvalidClientMessageException): TokenReqHandler._validate_xfer_public_txn(None, 100000000000000000000000, 100000000000000000000001)
42.884615
107
0.834081
118
1,115
7.474576
0.29661
0.124717
0.162132
0.336735
0.557823
0.512472
0.240363
0.192744
0.192744
0.192744
0
0.17653
0.10583
1,115
25
108
44.6
0.708124
0
0
0.111111
0
0
0
0
0
0
0
0
0
1
0.111111
true
0
0.222222
0
0.333333
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
2
d664ac5a285d6478ce5d8a5a693d95c877c57268
3,298
py
Python
solutions/LeetCode/Python3/20.py
timxor/leetcode-journal
5f1cb6bcc44a5bc33d88fb5cdb4126dfc6f4232a
[ "MIT" ]
854
2018-11-09T08:06:16.000Z
2022-03-31T06:05:53.000Z
solutions/LeetCode/Python3/20.py
timxor/leetcode-journal
5f1cb6bcc44a5bc33d88fb5cdb4126dfc6f4232a
[ "MIT" ]
29
2019-06-02T05:02:25.000Z
2021-11-15T04:09:37.000Z
solutions/LeetCode/Python3/20.py
timxor/leetcode-journal
5f1cb6bcc44a5bc33d88fb5cdb4126dfc6f4232a
[ "MIT" ]
347
2018-12-23T01:57:37.000Z
2022-03-12T14:51:21.000Z
__________________________________________________________________________________________________ 32ms class Solution: def isValid(self, s: 'str') -> 'bool': v = [] d = {']':'[', ')':'(', '}':'{'} for i in range(len(s)): if s[i] in d.values(): v.append(s[i]) elif s[i] in d.keys(): if len(v) == 0: return False elif v[-1] == d[s[i]]: v.pop() else: return False if v == []: return True else: return False __________________________________________________________________________________________________ 36ms class Solution: def isValid(self, s: str) -> bool: if not s: return True st = [] for ch in s: if ch == '[': st.append(']') elif ch == '{': st.append('}') elif ch == '(': st.append(')') elif not st or st.pop()!=ch: return False return st == [] __________________________________________________________________________________________________ 40ms class Solution: def isValid(self, s: str) -> bool: stack = [] bra = ('(', ')') cur = ('{', '}') squ = ('[', ']') brackets = [bra, cur, squ] for char in s: for b in brackets: if char == b[0]: stack.append(b[0]) elif char == b[1]: if len(stack) <= 0: return False last = stack.pop() if not last == b[0]: return False return len(stack) == 0 __________________________________________________________________________________________________ 12128 kb class Solution: def isValid(self, s: 'str') -> 'bool': dictt = {')': '(', ']' : '[', '}' : '{'} que = '' i = 0 while i < len(s): if s[i] in dictt: if que == '': return False elif que[-1] == dictt[s[i]]: que = que[:-1] i += 1 else: return False else: que += s[i] i += 1 if que == '': return True return False __________________________________________________________________________________________________ 12136 kb class Solution: def isValid(self, s: 'str') -> 'bool': dictt = {')': '(', ']' : '[', '}' : '{'} que = [] i = 0 while i < len(s): if s[i] in dictt: if que == []: return False elif que[-1] == dictt[s[i]]: que = que[:-1] i += 1 else: return False else: que.append(s[i]) i += 1 if que 
== []: return True return False __________________________________________________________________________________________________
30.82243
98
0.432686
263
3,298
3.190114
0.18251
0.15733
0.095352
0.137068
0.568534
0.568534
0.556615
0.556615
0.381406
0.381406
0
0.01864
0.446938
3,298
106
99
31.113208
0.441338
0
0
0.584906
0
0
0.015464
0
0
0
0
0
0
0
null
null
0
0
null
null
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
d67b1174f51ec72d490932be8e67f48e1c5a78fa
993
py
Python
PyFLOTRAN/tests/readers/test_StreamlineReader.py
aitirga/PyFLOTRAN
f601052a0686e89434ae91a917f218afa6c31528
[ "MIT" ]
2
2021-05-14T18:41:49.000Z
2021-10-05T15:06:31.000Z
PyFLOTRAN/tests/readers/test_StreamlineReader.py
aitirga/PyFLOTRAN
f601052a0686e89434ae91a917f218afa6c31528
[ "MIT" ]
6
2020-04-30T10:19:39.000Z
2022-03-21T18:16:14.000Z
PyFLOTRAN/tests/readers/test_StreamlineReader.py
aitirga/PyFLOTRAN
f601052a0686e89434ae91a917f218afa6c31528
[ "MIT" ]
1
2020-03-24T10:37:23.000Z
2020-03-24T10:37:23.000Z
import unittest from PyFLOTRAN.readers import StreamlineReader from PyFLOTRAN.config import config from PyFLOTRAN.utils import test_data_path from pathlib import Path class StreamlineReaderCase(unittest.TestCase): def setUp(self) -> None: self.stream_line_reader = StreamlineReader(filename=test_data_path() / config.streamline_reader.file) def test_read_case(self): self.assertEqual(self.stream_line_reader.raw_data["Points:0"].loc[0], 0.066709) def test_generate_streams(self): self.assertEqual(self.stream_line_reader.stream_data.get_group(0).iloc[1, 0], 1.0) def test_compute_arrival_times(self): arrival_times = self.stream_line_reader.compute_arrival_times() self.assertEqual(arrival_times[10], 14114.0) def test_write_csv(self): write_path = Path.cwd() / "test_streamlines.csv" self.stream_line_reader.dump_to_csv(write_path) write_path.unlink() if __name__ == '__main__': unittest.main()
32.032258
109
0.744209
135
993
5.140741
0.4
0.072046
0.100865
0.144092
0.112392
0.112392
0.112392
0
0
0
0
0.026284
0.1571
993
30
110
33.1
0.802867
0
0
0
0
0
0.036254
0
0
0
0
0
0.142857
1
0.238095
false
0
0.238095
0
0.52381
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
d6901523ef69d9100a941ac8d18ab89a26542594
1,957
py
Python
res/candidates.py
RainMaker1707/SAT_in_python
1d69a652811f598221ed8425223f45669a3cbcc9
[ "MIT" ]
null
null
null
res/candidates.py
RainMaker1707/SAT_in_python
1d69a652811f598221ed8425223f45669a3cbcc9
[ "MIT" ]
null
null
null
res/candidates.py
RainMaker1707/SAT_in_python
1d69a652811f598221ed8425223f45669a3cbcc9
[ "MIT" ]
null
null
null
class Candidate: _inner_id = 0 def __init__(self, name=""): self._id = Candidate._inner_id self.name = name Candidate._inner_id += 1 CandidateList.append(self) @staticmethod def reset_id(): Candidate._inner_id = 0 class CandidateList: """ CandidateList has no constructor because it is used as a Singleton """ _inner = [] def __str__(self): """ transform the string reference of the list in a list of string object's reference to each candidate stored in it :return: a string (list format) of candidate references """ return str(CandidateList._inner) @staticmethod def append(to_add: Candidate): """ append a candidate on the tail of the CandidateList this function is automatically called when a candidate is created :param to_add: a Candidate object to store """ CandidateList._inner.append(to_add) @staticmethod def insert(to_add: Candidate, pos: int): """ Insert a candidate on the position specified in pos argument :param to_add: Candidate to insert in the list :param pos: the index where you want to store the candidate """ CandidateList._inner.insert(pos, to_add) @staticmethod def get(): """ :return: the list of candidate objects usable in program """ return CandidateList._inner @staticmethod def size(): """ :return: the size of the inner list or the number of candidates stored in the CandidateList """ return len(CandidateList._inner) @staticmethod def is_empty(): return len(CandidateList._inner) == 0 @staticmethod def empty_list(): """ Method to empty the CandidateList and reset the Candidate id """ CandidateList._inner = [] Candidate.reset_id()
27.180556
99
0.61676
230
1,957
5.095652
0.317391
0.08959
0.054608
0.084471
0
0
0
0
0
0
0
0.002956
0.308636
1,957
71
100
27.56338
0.863267
0.395503
0
0.212121
0
0
0
0
0
0
0
0
0
1
0.272727
false
0
0
0.030303
0.515152
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
d69ecef17b74655c708a841db60714eaa78dd753
499
py
Python
chat/models.py
nim65s/tutochan
33d77af45d6743bad82f3a1ed07871fc7eacb5f7
[ "BSD-2-Clause" ]
null
null
null
chat/models.py
nim65s/tutochan
33d77af45d6743bad82f3a1ed07871fc7eacb5f7
[ "BSD-2-Clause" ]
null
null
null
chat/models.py
nim65s/tutochan
33d77af45d6743bad82f3a1ed07871fc7eacb5f7
[ "BSD-2-Clause" ]
null
null
null
from django.conf import settings from django.db import models from ndh.models import Links, NamedModel, TimeStampedModel class Chan(TimeStampedModel, NamedModel, Links): pass class Message(TimeStampedModel): chan = models.ForeignKey(Chan, on_delete=models.CASCADE) user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE) message = models.TextField() def __str__(self): return f'{self.chan} | {self.created:%x %X} - {self.user}: {self.message}'
27.722222
82
0.737475
63
499
5.714286
0.47619
0.055556
0.077778
0.116667
0
0
0
0
0
0
0
0
0.152305
499
17
83
29.352941
0.851064
0
0
0
0
0.090909
0.128257
0
0
0
0
0
0
1
0.090909
false
0.090909
0.272727
0.090909
0.909091
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
2
d6ada5b99b4b9d2823590b6d87c0f34fcb4deb5f
201
py
Python
problem2_optimal tour/example.py
mrwrongwrongwrong/heuristc-problem-solving
9b86135fd7507299edc74ccc74d96fb7473070ec
[ "Apache-2.0" ]
null
null
null
problem2_optimal tour/example.py
mrwrongwrongwrong/heuristc-problem-solving
9b86135fd7507299edc74ccc74d96fb7473070ec
[ "Apache-2.0" ]
null
null
null
problem2_optimal tour/example.py
mrwrongwrongwrong/heuristc-problem-solving
9b86135fd7507299edc74ccc74d96fb7473070ec
[ "Apache-2.0" ]
null
null
null
import OptimalTouring as Game x = Game.OptimalTouring("sites.txt") i = 1 while x.getTime() < x.getDay()*1440: x.sendMove(siteId=i) x.sendMove(delayTime=240) i += 1 x.settlement()
20.1
37
0.641791
29
201
4.448276
0.62069
0.031008
0
0
0
0
0
0
0
0
0
0.056604
0.208955
201
9
38
22.333333
0.754717
0
0
0
0
0
0.046875
0
0
0
0
0
0
1
0
false
0
0.125
0
0.125
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
d6b6db72ac38228a6497d52d593f993cd52cc7bc
445
py
Python
InstagramAPI/__init__.py
Afafabdb/Instagram-API-python
0681b664ced73ee6d881e7a1bbca3e9c8da601b1
[ "MIT" ]
17
2017-06-29T22:11:25.000Z
2022-03-30T15:17:03.000Z
InstagramAPI/__init__.py
Afafabdb/Instagram-API-python
0681b664ced73ee6d881e7a1bbca3e9c8da601b1
[ "MIT" ]
7
2017-07-03T13:54:40.000Z
2022-03-20T10:40:06.000Z
InstagramAPI/__init__.py
Afafabdb/Instagram-API-python
0681b664ced73ee6d881e7a1bbca3e9c8da601b1
[ "MIT" ]
15
2017-07-24T14:40:51.000Z
2022-02-11T09:02:51.000Z
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import from .instagram_api import InstagramAPI from .endpoints import InstagramAPIEndPoints try: # Developers can add a credentials file to enable the tests and examples to log in. # It might not be present. from . import credentials except ImportError: credentials = None __all__ = InstagramAPI, InstagramAPIEndPoints, credentials
27.8125
88
0.734831
54
445
5.888889
0.759259
0
0
0
0
0
0
0
0
0
0
0.002817
0.202247
445
15
89
29.666667
0.890141
0.334831
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0.625
null
null
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
2
d6bd1bf21b9c047439d14ce2dd1b0ce1994cc287
435
py
Python
5 kyu/Integers Recreation One.py
mwk0408/codewars_solutions
9b4f502b5f159e68024d494e19a96a226acad5e5
[ "MIT" ]
6
2020-09-03T09:32:25.000Z
2020-12-07T04:10:01.000Z
5 kyu/Integers Recreation One.py
mwk0408/codewars_solutions
9b4f502b5f159e68024d494e19a96a226acad5e5
[ "MIT" ]
1
2021-12-13T15:30:21.000Z
2021-12-13T15:30:21.000Z
5 kyu/Integers Recreation One.py
mwk0408/codewars_solutions
9b4f502b5f159e68024d494e19a96a226acad5e5
[ "MIT" ]
null
null
null
def list_squared(m, n): data=[[1, 1], [42, 2500], [246, 84100],[287, 84100],[728, 722500], [1434, 2856100], [1673, 2856100], [1880, 4884100],[4264, 24304900],[6237, 45024100], [9799, 96079204], [9855, 113635600]] result=[] index=0 while True: if m<=data[index][0]<=n: result.append(data[index]) index+=1 if index==len(data): break return result
29
192
0.526437
54
435
4.222222
0.685185
0.052632
0
0
0
0
0
0
0
0
0
0.381877
0.289655
435
15
193
29
0.355987
0
0
0
0
0
0
0
0
0
0
0
0
1
0.090909
false
0
0
0
0.181818
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
d6d60f8a8cd3ab63f319b297895a5243fa157611
498
py
Python
knox/serializers.py
liradb2000/django-rest-knox
2120bdb44173db121611387b9e1a2e8e358b0123
[ "MIT" ]
null
null
null
knox/serializers.py
liradb2000/django-rest-knox
2120bdb44173db121611387b9e1a2e8e358b0123
[ "MIT" ]
null
null
null
knox/serializers.py
liradb2000/django-rest-knox
2120bdb44173db121611387b9e1a2e8e358b0123
[ "MIT" ]
null
null
null
from django.contrib.auth import get_user_model from rest_framework import serializers from knox.models import AuthToken User = get_user_model() username_field = User.USERNAME_FIELD if hasattr(User, 'USERNAME_FIELD') else 'username' class UserSerializer(serializers.ModelSerializer): class Meta: model = User fields = (username_field,) class AuthTokenSerializer(serializers.ModelSerializer): class Meta: model = AuthToken fields = ('created', 'token_key')
29.294118
87
0.746988
57
498
6.350877
0.491228
0.143646
0.066298
0.19337
0.220994
0
0
0
0
0
0
0
0.174699
498
17
88
29.294118
0.880779
0
0
0.153846
0
0
0.076152
0
0
0
0
0
0
1
0
false
0
0.230769
0
0.538462
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
d6ebe5579cf805caaff2ed27b36845dba1d43132
3,095
py
Python
custom1/patches/v10/reset_default_icons.py
jof2jc/custom1
e7dc6ddaf4a40688082e84f69a9ea4bce13cb2ac
[ "MIT" ]
null
null
null
custom1/patches/v10/reset_default_icons.py
jof2jc/custom1
e7dc6ddaf4a40688082e84f69a9ea4bce13cb2ac
[ "MIT" ]
null
null
null
custom1/patches/v10/reset_default_icons.py
jof2jc/custom1
e7dc6ddaf4a40688082e84f69a9ea4bce13cb2ac
[ "MIT" ]
null
null
null
from __future__ import unicode_literals import frappe from frappe import _, scrub from frappe.utils import nowdate, nowtime, now_datetime, flt, cstr, formatdate, get_datetime, add_days, getdate, get_time from frappe.utils.dateutils import parse_date from frappe.model.naming import make_autoname import json import datetime from erpnext.setup.utils import get_exchange_rate from erpnext.accounts.utils import get_account_currency, get_balance_on import erpnext.accounts.utils from re import sub from decimal import Decimal def execute(): frappe.db.sql ("""DELETE from `tabDesktop Icon` where standard=0 and module_name not in ('Accounts','Selling','Buying','Stock','Setup'""") #frappe.db.sql ("""Update `tabDesktop Icon` set hidden=0, blocked=0 where standard=1""") #frappe.db.sql ("""Update `tabDesktop Icon` set standard=1, hidden=1, blocked=1 where module_name in ('Accounts','Selling','Buying','Stock','Setup')""") frappe.db.sql ("""Update `tabDesktop Icon` set hidden=1, blocked=1 where module_name in ('File Manager','Tools','ptdun','Report','Website','CRM','Integrations', 'Email Inbox','Issue','Lead','Profit and Loss Statement','Profit and Loss Statment','Human Resources','Manufacturing', 'POS', 'Leaderboard','Support','Learn','Maintenance','Account Receivable','Account Payable', 'Student','Student Group', 'Course Schedule','Sales Register', 'Student Attendance', 'Course', 'Student Attendance Tool', 'Program', 'Student Applicant', 'Examination', 'Assessment', 'Fees', 'Instructor', 'Room', 'Schools', 'Healthcare', 'Education', 'Hub', 'Data Import Tool', 'Restaurant', 'Agriculture', 'Crop', 'Crop Cycle', 'Fertilizer', 'Land Unit', 'Disease', 'Plant Analysis', 'Soil Analysis', 'Water Analysis', 'Soil Texture', 'Weather', 'Grant Application', 'Donor', 'Volunteer', 'Member','Chapter', 'Delivery Note Trends', 'Non Profit')""") #frappe.db.sql ("""Update `tabDesktop Icon` set hidden=1, blocked=1 where # module_name in ('Custom1','Item','Customer','Supplier','Sales Order','Delivery 
Note','Sales Invoice', # 'Purchase Order','Purchase Receipt','Purchase Invoice','Payment Entry','Journal Entry','Note','ToDo', 'Task', # 'Stock Entry','Stock Reconciliation','Item Summary','Item Price','Chart of Accounts','Item wise Sales Register', # 'Stock Reorder Projection','IMEI', 'Project','Projects', 'Product Bundle', 'Warehouse', 'Terms and Conditions', # 'Sales Overview By Period', 'Adresses and Contacts', 'Material Request', 'Quotation', 'Supplier Quotation')""") #frappe.db.sql ("""Update `tabDesktop Icon` set hidden=1 where standard=1 and # module_name in ('Accounts','Selling','Buying','Stock','Setup')""") if frappe.db.exists("DocType", "Payment Term"): frappe.db.sql ("""Update `tabCustomer Group` set payment_terms=''""") frappe.db.sql ("""Update `tabSupplier Type` set payment_terms=''""") frappe.db.sql ("""Update `tabCompany` set payment_terms=''""") doc = frappe.get_doc("Stock Settings") doc.show_barcode_field = 0 doc.save() doc = frappe.get_doc("Website Settings") doc.home_page = "desk" doc.save()
58.396226
153
0.717286
396
3,095
5.527778
0.472222
0.036546
0.045226
0.062129
0.205573
0.205573
0.205573
0.160804
0.127455
0.108725
0
0.005457
0.111793
3,095
53
154
58.396226
0.790833
0.326656
0
0.058824
0
0.176471
0.592771
0.106506
0
0
0
0.018868
0
1
0.029412
false
0
0.411765
0
0.441176
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
1
0
0
0
0
2
ba36fcf40b2680fbc57f7aeecea43f5e9606281a
10,253
py
Python
MoinMoin/auth/_tests/test_auth.py
RealTimeWeb/wikisite
66a22c68c172f0ebb3c88a9885ccd33e2d59c3c5
[ "Apache-2.0" ]
null
null
null
MoinMoin/auth/_tests/test_auth.py
RealTimeWeb/wikisite
66a22c68c172f0ebb3c88a9885ccd33e2d59c3c5
[ "Apache-2.0" ]
null
null
null
MoinMoin/auth/_tests/test_auth.py
RealTimeWeb/wikisite
66a22c68c172f0ebb3c88a9885ccd33e2d59c3c5
[ "Apache-2.0" ]
1
2020-01-09T04:53:32.000Z
2020-01-09T04:53:32.000Z
# -*- coding: utf-8 -*- """ MoinMoin - MoinMoin.auth and session tests @copyright: 2008 MoinMoin:ThomasWaldmann @license: GNU GPL, see COPYING for details. """ import py.test from MoinMoin.web.request import TestRequest, evaluate_request from MoinMoin import wsgiapp from MoinMoin._tests import wikiconfig class AuthTest: """ test misc. auth methods """ PAGES = ['FrontPage', 'MoinMoin', 'HelpContents', 'WikiSandBox', ] # must all exist! def setup_class(cls): """ Stuff that should be run to init the state of this test class Some test needs specific config values, or they will fail. """ def teardown_class(cls): """ Stuff that should run to clean up the state of this test class """ def run_request(self, **params): request = TestRequest(**params) #XXX: config passing hack request.given_config = getattr(self, 'Config', wikiconfig.Config) context = wsgiapp.init(request) wsgiapp.run(context) return context class TestNoAuth(AuthTest): def testNoAuth(self): """ run a simple request, no auth, just check if it succeeds """ request = self.run_request() # anon user? 
assert not request.user.valid appiter, status, headers = evaluate_request(request.request) # check if the request resulted in normal status, result headers and content assert status[:3] == '200' has_ct = has_v = has_cc = False for k, v in headers: if k == 'Content-Type': assert v.startswith('text/html') has_ct = True elif k == 'Vary': assert 'Cookie' in v assert 'Accept-Language' in v has_v = True elif k == 'Cache-Control': assert 'public' in v has_cc = True elif k == 'Set-Cookie': cookie = v assert has_ct assert has_v # XXX BROKEN?: #assert has_cc # cache anon user's content assert '</html>' in ''.join(appiter) class TestAnonSession(AuthTest): class Config(wikiconfig.Config): cookie_lifetime = 1, 12 def testAnonSession(self): """ run some requests, no auth, check if anon sessions work """ cookie = '' trail_expected = [] first = True for pagename in self.PAGES: environ_overrides = {'HTTP_COOKIE': cookie} request = self.run_request(path='/%s' % pagename, environ_overrides=environ_overrides) # anon user? assert not request.user.valid # Do we have a session? assert request.session is not None appiter, status, headers = evaluate_request(request.request) # check if the request resulted in normal status, result headers and content assert status[:3] == '200' has_ct = has_v = has_cc = False for k, v in headers: if k == 'Content-Type': assert v.startswith('text/html') has_ct = True elif k == 'Vary': assert 'Cookie' in v assert 'Accept-Language' in v has_v = True elif k == 'Cache-Control': assert 'private' in v assert 'must-revalidate' in v has_cc = True elif k == 'Set-Cookie': cookie = v assert has_ct assert has_v # XX BROKEN #assert not has_cc # do not cache anon user's (with session!) content assert '</html>' in ''.join(appiter) # The trail is only ever saved on the second page display # because otherwise anonymous sessions would be created # for every request, even if it never sent a cookie! 
# Hence, skip over the first request and only verify # the trail for the second and following. if first: first = False continue assert not request.session.is_new trail_expected.append(unicode(pagename)) # Requested pagenames get into trail? assert 'trail' in request.session trail = request.session['trail'] assert trail == trail_expected class TestHttpAuthSession(AuthTest): py.test.skip("We currently have no http auth code in moin. GivenAuth relies on the web server doing the http auth check.") class Config(wikiconfig.Config): from MoinMoin.auth.http import HttpAuth # does not exist (yet?) auth = [HttpAuth(autocreate=True)] def testHttpAuthSession(self): """ run some requests with http auth, check whether session works """ username = u'HttpAuthTestUser' auth_info = u'%s:%s' % (username, u'testpass') auth_header = 'Basic %s' % auth_info.encode('base64') cookie = '' trail_expected = [] first = True for pagename in self.PAGES: environ_overrides = {'HTTP_COOKIE': cookie, 'HTTP_AUTHORIZATION': auth_header} request = self.run_request(path='/%s' % pagename, environ_overrides=environ_overrides) # Login worked? assert request.user.valid assert request.user.name == username # Do we have a session? 
assert request.session is not None appiter, status, headers = evaluate_request(request.request) # check if the request resulted in normal status, result headers and content assert status[:3] == '200' has_ct = has_v = has_cc = False for k, v in request.headers: if k == 'Content-Type': assert v.startswith('text/html') has_ct = True elif k == 'Vary': assert 'Cookie' in v assert 'Accept-Language' in v has_v = True elif k == 'Cache-Control': assert 'private' in v assert 'must-revalidate' in v has_cc = True elif k == 'Set-Cookie': cookie = v assert has_ct assert has_v assert has_cc # do not cache logged-in user's content assert '</html>' in ''.join(appiter) # The trail is only ever saved on the second page display # because otherwise anonymous sessions would be created # for every request, even if it never sent a cookie! # Hence, skip over the first request and only verify # the trail for the second and following. if first: first = False continue trail_expected.append(unicode(pagename)) # Requested pagenames get into trail? assert 'trail' in request.session trail = request.session['trail'] assert trail == trail_expected class TestMoinAuthSession(AuthTest): class Config(wikiconfig.Config): from MoinMoin.auth import MoinAuth auth = [MoinAuth()] def testMoinAuthSession(self): """ run some requests with MoinAuth, check whether session works """ from MoinMoin.user import User username = u'MoinAuthTestUser' password = u'ßecretß' User(self.request, name=username, password=password).save() # create user trail_expected = [] first = True for pagename in self.PAGES: if first: formdata = { 'name': username, 'password': password, 'login': 'login', } request = self.run_request(path='/%s' % pagename, query_string='login=login', method='POST', form_data=formdata) else: # not first page, use session cookie environ_overrides = {'HTTP_COOKIE': cookie} request = self.run_request(path='/%s' % pagename, environ_overrides=environ_overrides) # Login worked? 
assert request.user.valid assert request.user.name == username # Do we have a session? assert request.session is not None appiter, status, headers = evaluate_request(request.request) # check if the request resulted in normal status, result headers and content assert status[:3] == '200' has_ct = has_v = has_cc = False for k, v in request.headers: if k == 'Content-Type': assert v.startswith('text/html') has_ct = True elif k == 'Vary': assert 'Cookie' in v assert 'Accept-Language' in v has_v = True elif k == 'Cache-Control': assert 'private' in v assert 'must-revalidate' in v has_cc = True elif k == 'Set-Cookie': cookie = v assert has_ct assert has_v assert has_cc # do not cache logged-in user's content assert '</html>' in ''.join(appiter) # The trail is only ever saved on the second page display # because otherwise anonymous sessions would be created # for every request, even if it never sent a cookie! # Hence, skip over the first request and only verify # the trail for the second and following. if first: first = False continue trail_expected.append(unicode(pagename)) # Requested pagenames get into trail? assert 'trail' in request.session trail = request.session['trail'] assert trail == trail_expected
37.694853
126
0.539647
1,123
10,253
4.854853
0.194123
0.008254
0.019809
0.019259
0.706163
0.67865
0.670213
0.636097
0.629677
0.621607
0
0.004105
0.382327
10,253
271
127
37.833948
0.856782
0.215157
0
0.736264
0
0.005495
0.092871
0
0
0
0
0
0.291209
1
0.038462
false
0.021978
0.038462
0
0.131868
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
ba3919953b865da5410b3b3d93a29d7ec9990f48
637
py
Python
tests/testapp/models.py
tjwalch/plata
6994fb5b801e728cc4f4beab4626dd0fcf0d39ff
[ "BSD-3-Clause" ]
null
null
null
tests/testapp/models.py
tjwalch/plata
6994fb5b801e728cc4f4beab4626dd0fcf0d39ff
[ "BSD-3-Clause" ]
null
null
null
tests/testapp/models.py
tjwalch/plata
6994fb5b801e728cc4f4beab4626dd0fcf0d39ff
[ "BSD-3-Clause" ]
null
null
null
from django.db import models from plata.product.models import ProductBase from plata.shop.models import PriceBase class Product(ProductBase): name = models.CharField(max_length=100) items_in_stock = models.IntegerField(default=0) class Meta: ordering = ['name'] def __unicode__(self): return self.name @models.permalink def get_absolute_url(self): return ('plata_product_detail', (self.pk,), {}) @property def sku(self): return u'' class Price(PriceBase): product = models.ForeignKey(Product, related_name='prices') class Meta: ordering = ['-id']
20.548387
63
0.66876
76
637
5.447368
0.552632
0.072464
0.082126
0
0
0
0
0
0
0
0
0.008081
0.22292
637
30
64
21.233333
0.828283
0
0
0.1
0
0
0.051805
0
0
0
0
0
0
1
0.15
false
0
0.15
0.15
0.8
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
2
ba44c5f510f9413058215d2780ed354df3110007
6,777
py
Python
cs15211/CombinationSumII.py
JulyKikuAkita/PythonPrac
0ba027d9b8bc7c80bc89ce2da3543ce7a49a403c
[ "Apache-2.0" ]
1
2021-07-05T01:53:30.000Z
2021-07-05T01:53:30.000Z
cs15211/CombinationSumII.py
JulyKikuAkita/PythonPrac
0ba027d9b8bc7c80bc89ce2da3543ce7a49a403c
[ "Apache-2.0" ]
null
null
null
cs15211/CombinationSumII.py
JulyKikuAkita/PythonPrac
0ba027d9b8bc7c80bc89ce2da3543ce7a49a403c
[ "Apache-2.0" ]
1
2018-01-08T07:14:08.000Z
2018-01-08T07:14:08.000Z
__source__ = 'https://leetcode.com/problems/combination-sum-ii/description/' # https://github.com/kamyu104/LeetCode/blob/master/Python/combination-sum-ii.py # Time: O(n! / m!(n-m)!) # Space: O(m) # DFS # # Description: Leetcode # 40. Combination Sum II # # Given a collection of candidate numbers (C) and a target number (T), # find all unique combinations in C where the candidate numbers sums to T. # # Each number in C may only be used once in the combination. # # Note: # All numbers (including target) will be positive integers. # Elements in a combination (a1, a2, ... , ak) must be in non-descending order. (ie, a1 <= a2 <= ... <= ak). # The solution set must not contain duplicate combinations. # For example, given candidate set 10,1,2,7,6,1,5 and target 8, # A solution set is: # [1, 7] # [1, 2, 5] # [2, 6] # [1, 1, 6] # # Companies # Snapchat # Related Topics # Array Backtracking # Similar Questions # Combination Sum # import unittest class Solution: # @param candidates, a list of integers # @param target, integer # @return a list of lists of integers def combinationSum2(self, candidates, target): result = [] self.combinationSum2Rec(sorted(candidates), result, 0, [] , target) return result def combinationSum2Rec(self, candidates, result, start, intermediate, target): if target == 0: result.append(intermediate) prev = 0 while start < len(candidates) and candidates[start] <= target: if prev != candidates[start]: self.combinationSum2Rec(candidates, result, start + 1, intermediate + [candidates[start]], target - candidates[start]) prev = candidates[start] start += 1 class SolutionOther: # @param candidates, a list of integers # @param target, integer # @return a list of lists of integers def combinationSum2(self, candidates, target): candidates.sort() SolutionOther.ret = [] self.DFS( candidates, target, 0 , []) return SolutionOther.ret def DFS(self, candidates, target, start, valuelist): length = len(candidates) if target == 0 and valuelist not in Solution.ret: return 
SolutionOther.ret.append(valuelist) for i in range(start, length): if target < candidates[i]: return self.DFS(candidates, target - candidates[i], i+1, valuelist+[candidates[i]]) #test class TestMethods(unittest.TestCase): def test_Local(self): test = SolutionOther() #print test.combinationSum2([6,2,3], 7) #print test.combinationSum2i([2,1], 2) #print test.combinationSum2p([10,1,2,7,6,1,5] , 8) self.assertEqual(1, 1) candidates, target = [10, 1, 2, 7, 6, 1, 5], 8 print Solution().combinationSum2(candidates, target) if __name__ == '__main__': unittest.main() Java = ''' # Thought: # # hashset + sort to avoid duplicates result # 26ms 18.36% class Solution { public List<List<Integer>> combinationSum2(int[] candidates, int target) { List<List<Integer>> list = new ArrayList<>(); Arrays.sort(candidates); backtrack(list, new ArrayList<>(), candidates, target, 0); return list; } private void backtrack(List<List<Integer>> list, List<Integer> tempList, int [] nums, int remain, int start){ if (remain < 0) return; if (remain == 0) { list.add(new ArrayList<>(tempList)); return; } Set<Integer> set = new HashSet<>(); for (int i = start; i < nums.length; i++) { if ( !set.contains(nums[i])) { tempList.add(nums[i]); set.add(nums[i]); backtrack(list, tempList, nums, remain - nums[i], i + 1); tempList.remove(tempList.size() - 1); } } } } # 11ms 83.52% class Solution { public List<List<Integer>> combinationSum2(int[] nums, int target) { List<List<Integer>> list = new ArrayList<>(); Arrays.sort(nums); backtrack(list, new ArrayList<>(), nums, target, 0); return list; } private void backtrack(List<List<Integer>> list, List<Integer> tempList, int [] nums, int remain, int start){ if(remain < 0) return; else if(remain == 0) list.add(new ArrayList<>(tempList)); else{ for(int i = start; i < nums.length; i++){ if(i > start && nums[i] == nums[i-1]) continue; // skip duplicates tempList.add(nums[i]); backtrack(list, tempList, nums, remain - nums[i], i + 1); tempList.remove(tempList.size() - 1); } } 
} } # 9ms 95.33% class Solution { public List<List<Integer>> combinationSum2(int[] candidates, int target) { List<List<Integer>> result = new ArrayList<>(); if (target == 0) return result; Arrays.sort(candidates); dfs(result, new ArrayList<>(), candidates, target, 0); return result; } private void dfs(List<List<Integer>> res, List<Integer> cur, int[] candidates, int target, int start){ if (target > 0) { for(int i = start; i < candidates.length && candidates[i] <= target; i++) { if (i > start && candidates[i] == candidates[i - 1]) continue; cur.add(candidates[i]); dfs(res, cur, candidates, target - candidates[i], i + 1); cur.remove(cur.size() - 1); } } if (target == 0) { res.add(new ArrayList<>(cur)); } } } # bottom-up # 8ms 99.28% class Solution { public List<List<Integer>> combinationSum2(int[] candidates, int target) { List<List<Integer>> result = new ArrayList<>(); if (target == 0) { return result; } Arrays.sort(candidates); combinationSum2(candidates, target, 0, result, new ArrayList<>(), false); return result; } private void combinationSum2(int[] candidates, int target, int index, List<List<Integer>> result, List<Integer> cur, boolean lastAdded) { if (target == 0) { result.add(new ArrayList<>(cur)); return; } else if (index == candidates.length || candidates[index] > target) { //need to after target == 0 //need else if increase runtime return; } combinationSum2(candidates, target, index + 1, result, cur, false); if (index == 0 || lastAdded || candidates[index - 1] != candidates[index]) { cur.add(candidates[index]); combinationSum2(candidates, target - candidates[index], index + 1, result, cur, true); cur.remove(cur.size() - 1); } } } '''
34.753846
141
0.587871
799
6,777
4.969962
0.217772
0.044321
0.052883
0.027701
0.373458
0.345253
0.316293
0.314279
0.278519
0.265424
0
0.026327
0.276966
6,777
194
142
34.93299
0.784082
0.164822
0
0.294964
0
0.05036
0.700249
0.107868
0
0
0
0
0.007194
0
null
null
0
0.007194
null
null
0.007194
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
ba52c6f6260e350236036be1801e524ac082a3d5
201
py
Python
examples/02 - Generators/config.py
dionisioC/pythonPerformance
dd239265c3c08581d609f1b09d242c2d9a7dfe68
[ "MIT" ]
null
null
null
examples/02 - Generators/config.py
dionisioC/pythonPerformance
dd239265c3c08581d609f1b09d242c2d9a7dfe68
[ "MIT" ]
null
null
null
examples/02 - Generators/config.py
dionisioC/pythonPerformance
dd239265c3c08581d609f1b09d242c2d9a7dfe68
[ "MIT" ]
1
2019-10-31T17:30:18.000Z
2019-10-31T17:30:18.000Z
# coding=utf-8 import ConfigParser def get_config(): configParser = ConfigParser.RawConfigParser() configFilePath = r'config.ini' configParser.read(configFilePath) return configParser
22.333333
49
0.756219
20
201
7.55
0.7
0
0
0
0
0
0
0
0
0
0
0.005917
0.159204
201
9
50
22.333333
0.887574
0.059701
0
0
0
0
0.053191
0
0
0
0
0
0
1
0.166667
false
0
0.166667
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
ba564174c2ca08f7129fb40caa5a6f40c8ab64e1
605
py
Python
notebooks/0-Base.py
GarrisonD/elo-competition
64897fc38df9a85c8c291f8cbe4330b510e654ba
[ "MIT" ]
null
null
null
notebooks/0-Base.py
GarrisonD/elo-competition
64897fc38df9a85c8c291f8cbe4330b510e654ba
[ "MIT" ]
7
2019-09-22T18:39:00.000Z
2022-03-26T19:58:23.000Z
notebooks/0-Base.py
GarrisonD/elo-competition
64897fc38df9a85c8c291f8cbe4330b510e654ba
[ "MIT" ]
null
null
null
# --- # jupyter: # jupytext: # cell_metadata_filter: ExecuteTime # formats: ipynb,py # text_representation: # extension: .py # format_name: light # format_version: '1.5' # jupytext_version: 1.3.0 # kernelspec: # display_name: Python 3 # language: python # name: python3 # --- # + # %load_ext autoreload # %autoreload 2 # - import numpy as np import pandas as pd # + import matplotlib.pyplot as plt # %matplotlib inline # %config InlineBackend.figure_format = 'retina' # - import seaborn as sns from tqdm.auto import tqdm DATA_PATH = '../data'
15.125
48
0.642975
71
605
5.338028
0.704225
0.042216
0
0
0
0
0
0
0
0
0
0.017467
0.242975
605
39
49
15.512821
0.810044
0.671074
0
0
0
0
0.04
0
0
0
0
0
0
1
0
false
0
0.833333
0
0.833333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
ba7f2235f66c0e0ff039827e067d156fc6d737b6
2,761
py
Python
Class/Abstraction.py
danial-shehroz-khan/pythonDemo
4bd1be78f357a33997f93c3656637b79ade6f8e2
[ "MIT" ]
null
null
null
Class/Abstraction.py
danial-shehroz-khan/pythonDemo
4bd1be78f357a33997f93c3656637b79ade6f8e2
[ "MIT" ]
null
null
null
Class/Abstraction.py
danial-shehroz-khan/pythonDemo
4bd1be78f357a33997f93c3656637b79ade6f8e2
[ "MIT" ]
null
null
null
ABSTRACTION Function is defined in abstract class with defination and pass keyword Inherit class has to use those fucntion at all cost In Parent class has to be inherited by abc From abc import abstact method from abc import abstractmethod,ABC class BMW(ABC): def __init__(self,make,model,year): self.make=make self.model=model self.year=year def start(self): #This function will also be inherited by the child class print("Starting the Car") def stop(self): #same as above comment print("Stopping the Car") @abstractmethod def drive(self): pass class ThreeSeries(BMW): def __init__(self,cruiseControlEnabled,make,model,year): #BMW.__init__(self,make,model,year) 1)))))))we have to add super instead of BMW super().__init__(make,model,year) #we wont be needing "self" any more self.cruiseControlEnabled=cruiseControlEnabled def display(self): print(self.cruiseControlEnabled) def drive(self): print ("Three series is Driven") class FiveSeries(BMW): def __init__(self,parkingAssistEnabled,make,model,year): BMW.__init__(self,make,model,year) #InvokeTheClassYouWantToInherItFeaturesFrom self.parkingAssistEnabled=parkingAssistEnabled def display(self): print(self.parkingAssistEnabled) def stop(self): #same as above comment super().start() #concept 2 using function of parent class form child class print("Button Start") def drive(self): print("Five Series is driven") threeSeries=ThreeSeries(True,"BMW","328i","2018") print(threeSeries.cruiseControlEnabled) print(threeSeries.make) print(threeSeries.model) print(threeSeries.year) threeSeries.start() threeSeries.stop() threeSeries.display() fiveSeries=FiveSeries(True,"BMW","528i","2020") print(fiveSeries.parkingAssistEnabled) print(fiveSeries.make) print(fiveSeries.model) print(fiveSeries.year) fiveSeries.start() fiveSeries.stop() fiveSeries.display() ------------------------------------------------------------------------------ #2)Assignement from abc import abstractmethod,ABC class TouchScreenLaptop(ABC): @abstractmethod def scroll(self): 
pass @abstractmethod def click(self): pass class HP(TouchScreenLaptop): def scroll(self): print("You have scrolled") @abstractmethod def click(self): pass class HPNotebook(HP): def scroll(self): print("You have scrolled") def click(self): print("You have clicked") HP=HPNotebook() print(HP.scroll()) print(HP.click())
23.008333
95
0.646505
310
2,761
5.680645
0.303226
0.035775
0.044293
0.028961
0.229983
0.191936
0.112436
0.042022
0.042022
0
0
0.008026
0.232887
2,761
119
96
23.201681
0.823418
0.116624
0
0.328947
0
0
0.065432
0
0
0
0
0
0
0
null
null
0.065789
0.039474
null
null
0.263158
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
2
ba9a897f397322253b6ddb6d3f56def95e28e4c5
255
py
Python
Web/Member/FunctionForPI/Servo_9G.py
tratitude/BridgeMaster
e3916b077d96f3520d0a8ed9bb548d614465aa2e
[ "Apache-2.0" ]
1
2021-01-05T14:40:08.000Z
2021-01-05T14:40:08.000Z
Web/Member/FunctionForPI/Servo_9G.py
fdmdkw/BridgeMaster
e3916b077d96f3520d0a8ed9bb548d614465aa2e
[ "Apache-2.0" ]
1
2021-10-19T08:05:06.000Z
2021-10-19T08:05:06.000Z
Web/Member/FunctionForPI/Servo_9G.py
fdmdkw/BridgeMaster
e3916b077d96f3520d0a8ed9bb548d614465aa2e
[ "Apache-2.0" ]
2
2019-10-21T15:25:37.000Z
2021-03-17T06:59:09.000Z
import RPi.GPIO as GPIO import time GPIO.setmode(GPIO.BOARD) 9G = 7 GPIO.setup(9G,GPIO.OUT) p = GPIO.PWM(7,50) p.start(5.9) def Left(): p.ChangeDutyCycle(4) def Right(): p.ChangeDutyCycle(8) def Balance(): p.ChangeDutyCycle(5.9)
12.142857
26
0.65098
43
255
3.860465
0.55814
0.289157
0
0
0
0
0
0
0
0
0
0.058537
0.196078
255
20
27
12.75
0.75122
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0.153846
null
null
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
ba9ec5e5414080b2d2b2188405f0f36d6cab29c5
69,907
py
Python
src/recommendationservice/demo_pb2.py
honeycombio/microservices-demo
a103c826ea737a2092f9f3839e39d61085707320
[ "Apache-2.0" ]
13
2021-05-11T10:32:03.000Z
2022-02-24T19:54:04.000Z
src/emailservice/demo_pb2.py
honeycombio/microservices-demo
a103c826ea737a2092f9f3839e39d61085707320
[ "Apache-2.0" ]
1
2021-12-15T20:49:44.000Z
2021-12-15T20:49:56.000Z
src/emailservice/demo_pb2.py
honeycombio/microservices-demo
a103c826ea737a2092f9f3839e39d61085707320
[ "Apache-2.0" ]
4
2021-04-05T10:41:46.000Z
2022-03-02T11:16:59.000Z
# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: demo.proto """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() DESCRIPTOR = _descriptor.FileDescriptor( name='demo.proto', package='msdemo', syntax='proto3', serialized_options=b'Z\013demo/msdemo', create_key=_descriptor._internal_create_key, serialized_pb=b'\n\ndemo.proto\x12\x06msdemo\"0\n\x08\x43\x61rtItem\x12\x12\n\nproduct_id\x18\x01 \x01(\t\x12\x10\n\x08quantity\x18\x02 \x01(\x05\"A\n\x0e\x41\x64\x64ItemRequest\x12\x0f\n\x07user_id\x18\x01 \x01(\t\x12\x1e\n\x04item\x18\x02 \x01(\x0b\x32\x10.msdemo.CartItem\"#\n\x10\x45mptyCartRequest\x12\x0f\n\x07user_id\x18\x01 \x01(\t\"!\n\x0eGetCartRequest\x12\x0f\n\x07user_id\x18\x01 \x01(\t\"8\n\x04\x43\x61rt\x12\x0f\n\x07user_id\x18\x01 \x01(\t\x12\x1f\n\x05items\x18\x02 \x03(\x0b\x32\x10.msdemo.CartItem\"\x07\n\x05\x45mpty\"B\n\x1aListRecommendationsRequest\x12\x0f\n\x07user_id\x18\x01 \x01(\t\x12\x13\n\x0bproduct_ids\x18\x02 \x03(\t\"2\n\x1bListRecommendationsResponse\x12\x13\n\x0bproduct_ids\x18\x01 \x03(\t\"\x7f\n\x07Product\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x0f\n\x07picture\x18\x04 \x01(\t\x12 \n\tprice_usd\x18\x05 \x01(\x0b\x32\r.msdemo.Money\x12\x12\n\ncategories\x18\x06 \x03(\t\"9\n\x14ListProductsResponse\x12!\n\x08products\x18\x01 \x03(\x0b\x32\x0f.msdemo.Product\"\x1f\n\x11GetProductRequest\x12\n\n\x02id\x18\x01 \x01(\t\"&\n\x15SearchProductsRequest\x12\r\n\x05query\x18\x01 \x01(\t\":\n\x16SearchProductsResponse\x12 \n\x07results\x18\x01 \x03(\x0b\x32\x0f.msdemo.Product\"T\n\x0fGetQuoteRequest\x12 \n\x07\x61\x64\x64ress\x18\x01 
\x01(\x0b\x32\x0f.msdemo.Address\x12\x1f\n\x05items\x18\x02 \x03(\x0b\x32\x10.msdemo.CartItem\"3\n\x10GetQuoteResponse\x12\x1f\n\x08\x63ost_usd\x18\x01 \x01(\x0b\x32\r.msdemo.Money\"U\n\x10ShipOrderRequest\x12 \n\x07\x61\x64\x64ress\x18\x01 \x01(\x0b\x32\x0f.msdemo.Address\x12\x1f\n\x05items\x18\x02 \x03(\x0b\x32\x10.msdemo.CartItem\"(\n\x11ShipOrderResponse\x12\x13\n\x0btracking_id\x18\x01 \x01(\t\"a\n\x07\x41\x64\x64ress\x12\x16\n\x0estreet_address\x18\x01 \x01(\t\x12\x0c\n\x04\x63ity\x18\x02 \x01(\t\x12\r\n\x05state\x18\x03 \x01(\t\x12\x0f\n\x07\x63ountry\x18\x04 \x01(\t\x12\x10\n\x08zip_code\x18\x05 \x01(\x05\"<\n\x05Money\x12\x15\n\rcurrency_code\x18\x01 \x01(\t\x12\r\n\x05units\x18\x02 \x01(\x03\x12\r\n\x05nanos\x18\x03 \x01(\x05\"8\n\x1eGetSupportedCurrenciesResponse\x12\x16\n\x0e\x63urrency_codes\x18\x01 \x03(\t\"I\n\x19\x43urrencyConversionRequest\x12\x1b\n\x04\x66rom\x18\x01 \x01(\x0b\x32\r.msdemo.Money\x12\x0f\n\x07to_code\x18\x02 \x01(\t\"\x90\x01\n\x0e\x43reditCardInfo\x12\x1a\n\x12\x63redit_card_number\x18\x01 \x01(\t\x12\x17\n\x0f\x63redit_card_cvv\x18\x02 \x01(\x05\x12#\n\x1b\x63redit_card_expiration_year\x18\x03 \x01(\x05\x12$\n\x1c\x63redit_card_expiration_month\x18\x04 \x01(\x05\"[\n\rChargeRequest\x12\x1d\n\x06\x61mount\x18\x01 \x01(\x0b\x32\r.msdemo.Money\x12+\n\x0b\x63redit_card\x18\x02 \x01(\x0b\x32\x16.msdemo.CreditCardInfo\"(\n\x0e\x43hargeResponse\x12\x16\n\x0etransaction_id\x18\x01 \x01(\t\"H\n\tOrderItem\x12\x1e\n\x04item\x18\x01 \x01(\x0b\x32\x10.msdemo.CartItem\x12\x1b\n\x04\x63ost\x18\x02 \x01(\x0b\x32\r.msdemo.Money\"\xb0\x01\n\x0bOrderResult\x12\x10\n\x08order_id\x18\x01 \x01(\t\x12\x1c\n\x14shipping_tracking_id\x18\x02 \x01(\t\x12$\n\rshipping_cost\x18\x03 \x01(\x0b\x32\r.msdemo.Money\x12)\n\x10shipping_address\x18\x04 \x01(\x0b\x32\x0f.msdemo.Address\x12 \n\x05items\x18\x05 \x03(\x0b\x32\x11.msdemo.OrderItem\"Q\n\x1cSendOrderConfirmationRequest\x12\r\n\x05\x65mail\x18\x01 \x01(\t\x12\"\n\x05order\x18\x02 
\x01(\x0b\x32\x13.msdemo.OrderResult\"\x99\x01\n\x11PlaceOrderRequest\x12\x0f\n\x07user_id\x18\x01 \x01(\t\x12\x15\n\ruser_currency\x18\x02 \x01(\t\x12 \n\x07\x61\x64\x64ress\x18\x03 \x01(\x0b\x32\x0f.msdemo.Address\x12\r\n\x05\x65mail\x18\x05 \x01(\t\x12+\n\x0b\x63redit_card\x18\x06 \x01(\x0b\x32\x16.msdemo.CreditCardInfo\"8\n\x12PlaceOrderResponse\x12\"\n\x05order\x18\x01 \x01(\x0b\x32\x13.msdemo.OrderResult\"\'\n\x11\x43\x61\x63heSizeResponse\x12\x12\n\ncache_size\x18\x01 \x01(\x03\"!\n\tAdRequest\x12\x14\n\x0c\x63ontext_keys\x18\x01 \x03(\t\"%\n\nAdResponse\x12\x17\n\x03\x61\x64s\x18\x01 \x03(\x0b\x32\n.msdemo.Ad\"(\n\x02\x41\x64\x12\x14\n\x0credirect_url\x18\x01 \x01(\t\x12\x0c\n\x04text\x18\x02 \x01(\t2\xac\x01\n\x0b\x43\x61rtService\x12\x32\n\x07\x41\x64\x64Item\x12\x16.msdemo.AddItemRequest\x1a\r.msdemo.Empty\"\x00\x12\x31\n\x07GetCart\x12\x16.msdemo.GetCartRequest\x1a\x0c.msdemo.Cart\"\x00\x12\x36\n\tEmptyCart\x12\x18.msdemo.EmptyCartRequest\x1a\r.msdemo.Empty\"\x00\x32y\n\x15RecommendationService\x12`\n\x13ListRecommendations\x12\".msdemo.ListRecommendationsRequest\x1a#.msdemo.ListRecommendationsResponse\"\x00\x32\xe5\x01\n\x15ProductCatalogService\x12=\n\x0cListProducts\x12\r.msdemo.Empty\x1a\x1c.msdemo.ListProductsResponse\"\x00\x12:\n\nGetProduct\x12\x19.msdemo.GetProductRequest\x1a\x0f.msdemo.Product\"\x00\x12Q\n\x0eSearchProducts\x12\x1d.msdemo.SearchProductsRequest\x1a\x1e.msdemo.SearchProductsResponse\"\x00\x32\x96\x01\n\x0fShippingService\x12?\n\x08GetQuote\x12\x17.msdemo.GetQuoteRequest\x1a\x18.msdemo.GetQuoteResponse\"\x00\x12\x42\n\tShipOrder\x12\x18.msdemo.ShipOrderRequest\x1a\x19.msdemo.ShipOrderResponse\"\x00\x32\xa3\x01\n\x0f\x43urrencyService\x12Q\n\x16GetSupportedCurrencies\x12\r.msdemo.Empty\x1a&.msdemo.GetSupportedCurrenciesResponse\"\x00\x12=\n\x07\x43onvert\x12!.msdemo.CurrencyConversionRequest\x1a\r.msdemo.Money\"\x00\x32K\n\x0ePaymentService\x12\x39\n\x06\x43harge\x12\x15.msdemo.ChargeRequest\x1a\x16.msdemo.ChargeResponse\"\x00\x32^\
n\x0c\x45mailService\x12N\n\x15SendOrderConfirmation\x12$.msdemo.SendOrderConfirmationRequest\x1a\r.msdemo.Empty\"\x00\x32\x94\x01\n\x0f\x43heckoutService\x12\x45\n\nPlaceOrder\x12\x19.msdemo.PlaceOrderRequest\x1a\x1a.msdemo.PlaceOrderResponse\"\x00\x12:\n\x0cGetCacheSize\x12\r.msdemo.Empty\x1a\x19.msdemo.CacheSizeResponse\"\x00\x32>\n\tAdService\x12\x31\n\x06GetAds\x12\x11.msdemo.AdRequest\x1a\x12.msdemo.AdResponse\"\x00\x42\rZ\x0b\x64\x65mo/msdemob\x06proto3' ) _CARTITEM = _descriptor.Descriptor( name='CartItem', full_name='msdemo.CartItem', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='product_id', full_name='msdemo.CartItem.product_id', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='quantity', full_name='msdemo.CartItem.quantity', index=1, number=2, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=22, serialized_end=70, ) _ADDITEMREQUEST = _descriptor.Descriptor( name='AddItemRequest', full_name='msdemo.AddItemRequest', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='user_id', full_name='msdemo.AddItemRequest.user_id', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), 
message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='item', full_name='msdemo.AddItemRequest.item', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=72, serialized_end=137, ) _EMPTYCARTREQUEST = _descriptor.Descriptor( name='EmptyCartRequest', full_name='msdemo.EmptyCartRequest', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='user_id', full_name='msdemo.EmptyCartRequest.user_id', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=139, serialized_end=174, ) _GETCARTREQUEST = _descriptor.Descriptor( name='GetCartRequest', full_name='msdemo.GetCartRequest', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='user_id', full_name='msdemo.GetCartRequest.user_id', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, 
is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=176, serialized_end=209, ) _CART = _descriptor.Descriptor( name='Cart', full_name='msdemo.Cart', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='user_id', full_name='msdemo.Cart.user_id', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='items', full_name='msdemo.Cart.items', index=1, number=2, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=211, serialized_end=267, ) _EMPTY = _descriptor.Descriptor( name='Empty', full_name='msdemo.Empty', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=269, serialized_end=276, ) _LISTRECOMMENDATIONSREQUEST = _descriptor.Descriptor( name='ListRecommendationsRequest', full_name='msdemo.ListRecommendationsRequest', filename=None, file=DESCRIPTOR, containing_type=None, 
create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='user_id', full_name='msdemo.ListRecommendationsRequest.user_id', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='product_ids', full_name='msdemo.ListRecommendationsRequest.product_ids', index=1, number=2, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=278, serialized_end=344, ) _LISTRECOMMENDATIONSRESPONSE = _descriptor.Descriptor( name='ListRecommendationsResponse', full_name='msdemo.ListRecommendationsResponse', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='product_ids', full_name='msdemo.ListRecommendationsResponse.product_ids', index=0, number=1, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=346, serialized_end=396, ) _PRODUCT = _descriptor.Descriptor( name='Product', full_name='msdemo.Product', filename=None, file=DESCRIPTOR, containing_type=None, 
create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='id', full_name='msdemo.Product.id', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='name', full_name='msdemo.Product.name', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='description', full_name='msdemo.Product.description', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='picture', full_name='msdemo.Product.picture', index=3, number=4, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='price_usd', full_name='msdemo.Product.price_usd', index=4, number=5, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='categories', 
# --- protoc-generated message Descriptors for proto package "msdemo" ---
# NOTE(review): this module is generated by the protocol buffer compiler
# (see the @@protoc_insertion_point markers below) — do NOT edit by hand;
# change the .proto file and regenerate instead.
# Each _UPPERCASE name is a google.protobuf.descriptor.Descriptor whose
# FieldDescriptors mirror the .proto fields (per the public FieldDescriptor
# enums: type=9 string, type=11 message, type=5 int32, type=3 int64;
# label=3 means repeated).  serialized_start/serialized_end are byte offsets
# into this file's serialized descriptor blob, so they must not be altered.
# Message-typed fields get message_type=None here and are wired up to their
# target Descriptors in the assignments at the end of this section.
# (First line continues the _PRODUCT statement, which opens above this chunk:
# the trailing 'categories' field plus the closing arguments of msdemo.Product.)
full_name='msdemo.Product.categories', index=5, number=6, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=398, serialized_end=525, )
# msdemo.ListProductsResponse
_LISTPRODUCTSRESPONSE = _descriptor.Descriptor( name='ListProductsResponse', full_name='msdemo.ListProductsResponse', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='products', full_name='msdemo.ListProductsResponse.products', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=527, serialized_end=584, )
# msdemo.GetProductRequest
_GETPRODUCTREQUEST = _descriptor.Descriptor( name='GetProductRequest', full_name='msdemo.GetProductRequest', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='id', full_name='msdemo.GetProductRequest.id', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=586, serialized_end=617, )
# msdemo.SearchProductsRequest
_SEARCHPRODUCTSREQUEST = _descriptor.Descriptor( name='SearchProductsRequest', full_name='msdemo.SearchProductsRequest', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='query', full_name='msdemo.SearchProductsRequest.query', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=619, serialized_end=657, )
# msdemo.SearchProductsResponse
_SEARCHPRODUCTSRESPONSE = _descriptor.Descriptor( name='SearchProductsResponse', full_name='msdemo.SearchProductsResponse', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='results', full_name='msdemo.SearchProductsResponse.results', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=659, serialized_end=717, )
# msdemo.GetQuoteRequest
_GETQUOTEREQUEST = _descriptor.Descriptor( name='GetQuoteRequest', full_name='msdemo.GetQuoteRequest', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='address', full_name='msdemo.GetQuoteRequest.address', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='items', full_name='msdemo.GetQuoteRequest.items', index=1, number=2, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=719, serialized_end=803, )
# msdemo.GetQuoteResponse
_GETQUOTERESPONSE = _descriptor.Descriptor( name='GetQuoteResponse', full_name='msdemo.GetQuoteResponse', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='cost_usd', full_name='msdemo.GetQuoteResponse.cost_usd', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=805, serialized_end=856, )
# msdemo.ShipOrderRequest
_SHIPORDERREQUEST = _descriptor.Descriptor( name='ShipOrderRequest', full_name='msdemo.ShipOrderRequest', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='address', full_name='msdemo.ShipOrderRequest.address', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='items', full_name='msdemo.ShipOrderRequest.items', index=1, number=2, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=858, serialized_end=943, )
# msdemo.ShipOrderResponse
_SHIPORDERRESPONSE = _descriptor.Descriptor( name='ShipOrderResponse', full_name='msdemo.ShipOrderResponse', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='tracking_id', full_name='msdemo.ShipOrderResponse.tracking_id', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=945, serialized_end=985, )
# msdemo.Address
_ADDRESS = _descriptor.Descriptor( name='Address', full_name='msdemo.Address', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='street_address', full_name='msdemo.Address.street_address', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='city', full_name='msdemo.Address.city', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='state', full_name='msdemo.Address.state', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='country', full_name='msdemo.Address.country', index=3, number=4, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='zip_code', full_name='msdemo.Address.zip_code', index=4, number=5, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=987, serialized_end=1084, )
# msdemo.Money
_MONEY = _descriptor.Descriptor( name='Money', full_name='msdemo.Money', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='currency_code', full_name='msdemo.Money.currency_code', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='units', full_name='msdemo.Money.units', index=1, number=2, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='nanos', full_name='msdemo.Money.nanos', index=2, number=3, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1086, serialized_end=1146, )
# msdemo.GetSupportedCurrenciesResponse
_GETSUPPORTEDCURRENCIESRESPONSE = _descriptor.Descriptor( name='GetSupportedCurrenciesResponse', full_name='msdemo.GetSupportedCurrenciesResponse', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='currency_codes', full_name='msdemo.GetSupportedCurrenciesResponse.currency_codes', index=0, number=1, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1148, serialized_end=1204, )
# msdemo.CurrencyConversionRequest — note the first field is named 'from'
# (a Python keyword); that is legal in descriptor metadata, the generated
# message exposes it via getattr/setattr rather than attribute syntax.
_CURRENCYCONVERSIONREQUEST = _descriptor.Descriptor( name='CurrencyConversionRequest', full_name='msdemo.CurrencyConversionRequest', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='from', full_name='msdemo.CurrencyConversionRequest.from', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='to_code', full_name='msdemo.CurrencyConversionRequest.to_code', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1206, serialized_end=1279, )
# msdemo.CreditCardInfo
_CREDITCARDINFO = _descriptor.Descriptor( name='CreditCardInfo', full_name='msdemo.CreditCardInfo', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='credit_card_number', full_name='msdemo.CreditCardInfo.credit_card_number', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='credit_card_cvv', full_name='msdemo.CreditCardInfo.credit_card_cvv', index=1, number=2, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='credit_card_expiration_year', full_name='msdemo.CreditCardInfo.credit_card_expiration_year', index=2, number=3, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='credit_card_expiration_month', full_name='msdemo.CreditCardInfo.credit_card_expiration_month', index=3, number=4, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1282, serialized_end=1426, )
# msdemo.ChargeRequest
_CHARGEREQUEST = _descriptor.Descriptor( name='ChargeRequest', full_name='msdemo.ChargeRequest', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='amount', full_name='msdemo.ChargeRequest.amount', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='credit_card', full_name='msdemo.ChargeRequest.credit_card', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1428, serialized_end=1519, )
# msdemo.ChargeResponse
_CHARGERESPONSE = _descriptor.Descriptor( name='ChargeResponse', full_name='msdemo.ChargeResponse', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='transaction_id', full_name='msdemo.ChargeResponse.transaction_id', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1521, serialized_end=1561, )
# msdemo.OrderItem
_ORDERITEM = _descriptor.Descriptor( name='OrderItem', full_name='msdemo.OrderItem', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='item', full_name='msdemo.OrderItem.item', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='cost', full_name='msdemo.OrderItem.cost', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1563, serialized_end=1635, )
# msdemo.OrderResult
_ORDERRESULT = _descriptor.Descriptor( name='OrderResult', full_name='msdemo.OrderResult', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='order_id', full_name='msdemo.OrderResult.order_id', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='shipping_tracking_id', full_name='msdemo.OrderResult.shipping_tracking_id', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='shipping_cost', full_name='msdemo.OrderResult.shipping_cost', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='shipping_address', full_name='msdemo.OrderResult.shipping_address', index=3, number=4, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='items', full_name='msdemo.OrderResult.items', index=4, number=5, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1638, serialized_end=1814, )
# msdemo.SendOrderConfirmationRequest
_SENDORDERCONFIRMATIONREQUEST = _descriptor.Descriptor( name='SendOrderConfirmationRequest', full_name='msdemo.SendOrderConfirmationRequest', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='email', full_name='msdemo.SendOrderConfirmationRequest.email', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='order', full_name='msdemo.SendOrderConfirmationRequest.order', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1816, serialized_end=1897, )
# msdemo.PlaceOrderRequest — field numbers skip 4 (email=5, credit_card=6),
# presumably a field retired in the .proto; do not reuse the gap.
_PLACEORDERREQUEST = _descriptor.Descriptor( name='PlaceOrderRequest', full_name='msdemo.PlaceOrderRequest', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='user_id', full_name='msdemo.PlaceOrderRequest.user_id', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='user_currency', full_name='msdemo.PlaceOrderRequest.user_currency', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='address', full_name='msdemo.PlaceOrderRequest.address', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='email', full_name='msdemo.PlaceOrderRequest.email', index=3, number=5, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='credit_card', full_name='msdemo.PlaceOrderRequest.credit_card', index=4, number=6, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1900, serialized_end=2053, )
# msdemo.PlaceOrderResponse
_PLACEORDERRESPONSE = _descriptor.Descriptor( name='PlaceOrderResponse', full_name='msdemo.PlaceOrderResponse', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='order', full_name='msdemo.PlaceOrderResponse.order', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=2055, serialized_end=2111, )
# msdemo.CacheSizeResponse
_CACHESIZERESPONSE = _descriptor.Descriptor( name='CacheSizeResponse', full_name='msdemo.CacheSizeResponse', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='cache_size', full_name='msdemo.CacheSizeResponse.cache_size', index=0, number=1, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=2113, serialized_end=2152, )
# msdemo.AdRequest
_ADREQUEST = _descriptor.Descriptor( name='AdRequest', full_name='msdemo.AdRequest', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='context_keys', full_name='msdemo.AdRequest.context_keys', index=0, number=1, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=2154, serialized_end=2187, )
# msdemo.AdResponse
_ADRESPONSE = _descriptor.Descriptor( name='AdResponse', full_name='msdemo.AdResponse', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='ads', full_name='msdemo.AdResponse.ads', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=2189, serialized_end=2226, )
# msdemo.Ad
_AD = _descriptor.Descriptor( name='Ad', full_name='msdemo.Ad', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='redirect_url', full_name='msdemo.Ad.redirect_url', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='text', full_name='msdemo.Ad.text', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=2228, serialized_end=2268, )
# Wire message-typed fields (declared above with message_type=None) to the
# Descriptor of the message they contain.  Targets such as _CARTITEM,
# _ADDITEMREQUEST, _CART and _EMPTYCARTREQUEST are defined earlier in the file.
_ADDITEMREQUEST.fields_by_name['item'].message_type = _CARTITEM
_CART.fields_by_name['items'].message_type = _CARTITEM
_PRODUCT.fields_by_name['price_usd'].message_type = _MONEY
_LISTPRODUCTSRESPONSE.fields_by_name['products'].message_type = _PRODUCT
_SEARCHPRODUCTSRESPONSE.fields_by_name['results'].message_type = _PRODUCT
_GETQUOTEREQUEST.fields_by_name['address'].message_type = _ADDRESS
_GETQUOTEREQUEST.fields_by_name['items'].message_type = _CARTITEM
_GETQUOTERESPONSE.fields_by_name['cost_usd'].message_type = _MONEY
_SHIPORDERREQUEST.fields_by_name['address'].message_type = _ADDRESS
_SHIPORDERREQUEST.fields_by_name['items'].message_type = _CARTITEM
_CURRENCYCONVERSIONREQUEST.fields_by_name['from'].message_type = _MONEY
_CHARGEREQUEST.fields_by_name['amount'].message_type = _MONEY
_CHARGEREQUEST.fields_by_name['credit_card'].message_type = _CREDITCARDINFO
_ORDERITEM.fields_by_name['item'].message_type = _CARTITEM
_ORDERITEM.fields_by_name['cost'].message_type = _MONEY
_ORDERRESULT.fields_by_name['shipping_cost'].message_type = _MONEY
_ORDERRESULT.fields_by_name['shipping_address'].message_type = _ADDRESS
_ORDERRESULT.fields_by_name['items'].message_type = _ORDERITEM
_SENDORDERCONFIRMATIONREQUEST.fields_by_name['order'].message_type = _ORDERRESULT
_PLACEORDERREQUEST.fields_by_name['address'].message_type = _ADDRESS
# --- cross-link remainder, file registration, and message-class creation ---
# NOTE(review): protoc-generated — do not edit by hand; regenerate from the
# .proto file instead.
# The last message-typed fields are linked to their Descriptors, every message
# Descriptor is indexed on the file DESCRIPTOR by its .proto name, the file is
# registered with the default symbol database, and then a concrete Python
# message class is built from each Descriptor via GeneratedProtocolMessageType
# and registered with _sym_db.  (This section continues past this chunk.)
_PLACEORDERREQUEST.fields_by_name['credit_card'].message_type = _CREDITCARDINFO
_PLACEORDERRESPONSE.fields_by_name['order'].message_type = _ORDERRESULT
_ADRESPONSE.fields_by_name['ads'].message_type = _AD
DESCRIPTOR.message_types_by_name['CartItem'] = _CARTITEM
DESCRIPTOR.message_types_by_name['AddItemRequest'] = _ADDITEMREQUEST
DESCRIPTOR.message_types_by_name['EmptyCartRequest'] = _EMPTYCARTREQUEST
DESCRIPTOR.message_types_by_name['GetCartRequest'] = _GETCARTREQUEST
DESCRIPTOR.message_types_by_name['Cart'] = _CART
DESCRIPTOR.message_types_by_name['Empty'] = _EMPTY
DESCRIPTOR.message_types_by_name['ListRecommendationsRequest'] = _LISTRECOMMENDATIONSREQUEST
DESCRIPTOR.message_types_by_name['ListRecommendationsResponse'] = _LISTRECOMMENDATIONSRESPONSE
DESCRIPTOR.message_types_by_name['Product'] = _PRODUCT
DESCRIPTOR.message_types_by_name['ListProductsResponse'] = _LISTPRODUCTSRESPONSE
DESCRIPTOR.message_types_by_name['GetProductRequest'] = _GETPRODUCTREQUEST
DESCRIPTOR.message_types_by_name['SearchProductsRequest'] = _SEARCHPRODUCTSREQUEST
DESCRIPTOR.message_types_by_name['SearchProductsResponse'] = _SEARCHPRODUCTSRESPONSE
DESCRIPTOR.message_types_by_name['GetQuoteRequest'] = _GETQUOTEREQUEST
DESCRIPTOR.message_types_by_name['GetQuoteResponse'] = _GETQUOTERESPONSE
DESCRIPTOR.message_types_by_name['ShipOrderRequest'] = _SHIPORDERREQUEST
DESCRIPTOR.message_types_by_name['ShipOrderResponse'] = _SHIPORDERRESPONSE
DESCRIPTOR.message_types_by_name['Address'] = _ADDRESS
DESCRIPTOR.message_types_by_name['Money'] = _MONEY
DESCRIPTOR.message_types_by_name['GetSupportedCurrenciesResponse'] = _GETSUPPORTEDCURRENCIESRESPONSE
DESCRIPTOR.message_types_by_name['CurrencyConversionRequest'] = _CURRENCYCONVERSIONREQUEST
DESCRIPTOR.message_types_by_name['CreditCardInfo'] = _CREDITCARDINFO
DESCRIPTOR.message_types_by_name['ChargeRequest'] = _CHARGEREQUEST
DESCRIPTOR.message_types_by_name['ChargeResponse'] = _CHARGERESPONSE
DESCRIPTOR.message_types_by_name['OrderItem'] = _ORDERITEM
DESCRIPTOR.message_types_by_name['OrderResult'] = _ORDERRESULT
DESCRIPTOR.message_types_by_name['SendOrderConfirmationRequest'] = _SENDORDERCONFIRMATIONREQUEST
DESCRIPTOR.message_types_by_name['PlaceOrderRequest'] = _PLACEORDERREQUEST
DESCRIPTOR.message_types_by_name['PlaceOrderResponse'] = _PLACEORDERRESPONSE
DESCRIPTOR.message_types_by_name['CacheSizeResponse'] = _CACHESIZERESPONSE
DESCRIPTOR.message_types_by_name['AdRequest'] = _ADREQUEST
DESCRIPTOR.message_types_by_name['AdResponse'] = _ADRESPONSE
DESCRIPTOR.message_types_by_name['Ad'] = _AD
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# One generated class per message; each carries its Descriptor and the
# generating module name so reflection can reconstruct it.
CartItem = _reflection.GeneratedProtocolMessageType('CartItem', (_message.Message,), {
  'DESCRIPTOR' : _CARTITEM,
  '__module__' : 'demo_pb2'
  # @@protoc_insertion_point(class_scope:msdemo.CartItem)
  })
_sym_db.RegisterMessage(CartItem)
AddItemRequest = _reflection.GeneratedProtocolMessageType('AddItemRequest', (_message.Message,), {
  'DESCRIPTOR' : _ADDITEMREQUEST,
  '__module__' : 'demo_pb2'
  # @@protoc_insertion_point(class_scope:msdemo.AddItemRequest)
  })
_sym_db.RegisterMessage(AddItemRequest)
EmptyCartRequest = _reflection.GeneratedProtocolMessageType('EmptyCartRequest', (_message.Message,), {
  'DESCRIPTOR' : _EMPTYCARTREQUEST,
  '__module__' : 'demo_pb2'
  # @@protoc_insertion_point(class_scope:msdemo.EmptyCartRequest)
  })
_sym_db.RegisterMessage(EmptyCartRequest)
GetCartRequest = _reflection.GeneratedProtocolMessageType('GetCartRequest', (_message.Message,), {
  'DESCRIPTOR' : _GETCARTREQUEST,
  '__module__' : 'demo_pb2'
  # @@protoc_insertion_point(class_scope:msdemo.GetCartRequest)
  })
_sym_db.RegisterMessage(GetCartRequest)
Cart = _reflection.GeneratedProtocolMessageType('Cart', (_message.Message,), {
  'DESCRIPTOR' : _CART,
  '__module__' : 'demo_pb2'
  # @@protoc_insertion_point(class_scope:msdemo.Cart)
  })
_sym_db.RegisterMessage(Cart)
Empty = _reflection.GeneratedProtocolMessageType('Empty', (_message.Message,), {
  'DESCRIPTOR' : _EMPTY,
  '__module__' : 'demo_pb2'
  # @@protoc_insertion_point(class_scope:msdemo.Empty)
  })
_sym_db.RegisterMessage(Empty)
ListRecommendationsRequest = _reflection.GeneratedProtocolMessageType('ListRecommendationsRequest', (_message.Message,), {
  'DESCRIPTOR' : _LISTRECOMMENDATIONSREQUEST,
  '__module__' : 'demo_pb2'
  # @@protoc_insertion_point(class_scope:msdemo.ListRecommendationsRequest)
  })
_sym_db.RegisterMessage(ListRecommendationsRequest)
ListRecommendationsResponse = _reflection.GeneratedProtocolMessageType('ListRecommendationsResponse', (_message.Message,), {
  'DESCRIPTOR' : _LISTRECOMMENDATIONSRESPONSE,
  '__module__' : 'demo_pb2'
  # @@protoc_insertion_point(class_scope:msdemo.ListRecommendationsResponse)
  })
_sym_db.RegisterMessage(ListRecommendationsResponse)
Product = _reflection.GeneratedProtocolMessageType('Product', (_message.Message,), {
  'DESCRIPTOR' : _PRODUCT,
  '__module__' : 'demo_pb2'
  # @@protoc_insertion_point(class_scope:msdemo.Product)
  })
_sym_db.RegisterMessage(Product)
ListProductsResponse = _reflection.GeneratedProtocolMessageType('ListProductsResponse', (_message.Message,), {
  'DESCRIPTOR' : _LISTPRODUCTSRESPONSE,
  '__module__' : 'demo_pb2'
  # @@protoc_insertion_point(class_scope:msdemo.ListProductsResponse)
  })
_sym_db.RegisterMessage(ListProductsResponse)
GetProductRequest = _reflection.GeneratedProtocolMessageType('GetProductRequest', (_message.Message,), {
  'DESCRIPTOR' : _GETPRODUCTREQUEST,
  '__module__' : 'demo_pb2'
  # @@protoc_insertion_point(class_scope:msdemo.GetProductRequest)
  })
_sym_db.RegisterMessage(GetProductRequest)
SearchProductsRequest = _reflection.GeneratedProtocolMessageType('SearchProductsRequest', (_message.Message,), {
  'DESCRIPTOR' : _SEARCHPRODUCTSREQUEST,
  '__module__' : 'demo_pb2'
  # @@protoc_insertion_point(class_scope:msdemo.SearchProductsRequest)
  })
_sym_db.RegisterMessage(SearchProductsRequest)
SearchProductsResponse = _reflection.GeneratedProtocolMessageType('SearchProductsResponse', (_message.Message,), {
  'DESCRIPTOR' : _SEARCHPRODUCTSRESPONSE,
  '__module__' : 'demo_pb2'
  # @@protoc_insertion_point(class_scope:msdemo.SearchProductsResponse)
  })
_sym_db.RegisterMessage(SearchProductsResponse)
GetQuoteRequest = _reflection.GeneratedProtocolMessageType('GetQuoteRequest', (_message.Message,), {
  'DESCRIPTOR' : _GETQUOTEREQUEST,
  '__module__' : 'demo_pb2'
  # @@protoc_insertion_point(class_scope:msdemo.GetQuoteRequest)
  })
_sym_db.RegisterMessage(GetQuoteRequest)
GetQuoteResponse = _reflection.GeneratedProtocolMessageType('GetQuoteResponse', (_message.Message,), {
  'DESCRIPTOR' : _GETQUOTERESPONSE,
  '__module__' : 'demo_pb2'
  # @@protoc_insertion_point(class_scope:msdemo.GetQuoteResponse)
  })
_sym_db.RegisterMessage(GetQuoteResponse)
ShipOrderRequest = _reflection.GeneratedProtocolMessageType('ShipOrderRequest', (_message.Message,), {
  'DESCRIPTOR' : _SHIPORDERREQUEST,
  '__module__' : 'demo_pb2'
  # @@protoc_insertion_point(class_scope:msdemo.ShipOrderRequest)
  })
_sym_db.RegisterMessage(ShipOrderRequest)
ShipOrderResponse = _reflection.GeneratedProtocolMessageType('ShipOrderResponse', (_message.Message,), {
  'DESCRIPTOR' : _SHIPORDERRESPONSE,
  '__module__' : 'demo_pb2'
  # @@protoc_insertion_point(class_scope:msdemo.ShipOrderResponse)
  })
_sym_db.RegisterMessage(ShipOrderResponse)
Address = _reflection.GeneratedProtocolMessageType('Address', (_message.Message,), {
  'DESCRIPTOR' : _ADDRESS,
  '__module__' : 'demo_pb2'
  # @@protoc_insertion_point(class_scope:msdemo.Address)
  })
_sym_db.RegisterMessage(Address)
Money = _reflection.GeneratedProtocolMessageType('Money', (_message.Message,), {
  'DESCRIPTOR' : _MONEY,
  '__module__' : 'demo_pb2'
  # @@protoc_insertion_point(class_scope:msdemo.Money)
  })
_sym_db.RegisterMessage(Money)
GetSupportedCurrenciesResponse = _reflection.GeneratedProtocolMessageType('GetSupportedCurrenciesResponse', (_message.Message,), {
  'DESCRIPTOR' : _GETSUPPORTEDCURRENCIESRESPONSE,
  '__module__' : 'demo_pb2'
  # @@protoc_insertion_point(class_scope:msdemo.GetSupportedCurrenciesResponse)
  })
_sym_db.RegisterMessage(GetSupportedCurrenciesResponse) CurrencyConversionRequest = _reflection.GeneratedProtocolMessageType('CurrencyConversionRequest', (_message.Message,), { 'DESCRIPTOR' : _CURRENCYCONVERSIONREQUEST, '__module__' : 'demo_pb2' # @@protoc_insertion_point(class_scope:msdemo.CurrencyConversionRequest) }) _sym_db.RegisterMessage(CurrencyConversionRequest) CreditCardInfo = _reflection.GeneratedProtocolMessageType('CreditCardInfo', (_message.Message,), { 'DESCRIPTOR' : _CREDITCARDINFO, '__module__' : 'demo_pb2' # @@protoc_insertion_point(class_scope:msdemo.CreditCardInfo) }) _sym_db.RegisterMessage(CreditCardInfo) ChargeRequest = _reflection.GeneratedProtocolMessageType('ChargeRequest', (_message.Message,), { 'DESCRIPTOR' : _CHARGEREQUEST, '__module__' : 'demo_pb2' # @@protoc_insertion_point(class_scope:msdemo.ChargeRequest) }) _sym_db.RegisterMessage(ChargeRequest) ChargeResponse = _reflection.GeneratedProtocolMessageType('ChargeResponse', (_message.Message,), { 'DESCRIPTOR' : _CHARGERESPONSE, '__module__' : 'demo_pb2' # @@protoc_insertion_point(class_scope:msdemo.ChargeResponse) }) _sym_db.RegisterMessage(ChargeResponse) OrderItem = _reflection.GeneratedProtocolMessageType('OrderItem', (_message.Message,), { 'DESCRIPTOR' : _ORDERITEM, '__module__' : 'demo_pb2' # @@protoc_insertion_point(class_scope:msdemo.OrderItem) }) _sym_db.RegisterMessage(OrderItem) OrderResult = _reflection.GeneratedProtocolMessageType('OrderResult', (_message.Message,), { 'DESCRIPTOR' : _ORDERRESULT, '__module__' : 'demo_pb2' # @@protoc_insertion_point(class_scope:msdemo.OrderResult) }) _sym_db.RegisterMessage(OrderResult) SendOrderConfirmationRequest = _reflection.GeneratedProtocolMessageType('SendOrderConfirmationRequest', (_message.Message,), { 'DESCRIPTOR' : _SENDORDERCONFIRMATIONREQUEST, '__module__' : 'demo_pb2' # @@protoc_insertion_point(class_scope:msdemo.SendOrderConfirmationRequest) }) _sym_db.RegisterMessage(SendOrderConfirmationRequest) PlaceOrderRequest = 
_reflection.GeneratedProtocolMessageType('PlaceOrderRequest', (_message.Message,), { 'DESCRIPTOR' : _PLACEORDERREQUEST, '__module__' : 'demo_pb2' # @@protoc_insertion_point(class_scope:msdemo.PlaceOrderRequest) }) _sym_db.RegisterMessage(PlaceOrderRequest) PlaceOrderResponse = _reflection.GeneratedProtocolMessageType('PlaceOrderResponse', (_message.Message,), { 'DESCRIPTOR' : _PLACEORDERRESPONSE, '__module__' : 'demo_pb2' # @@protoc_insertion_point(class_scope:msdemo.PlaceOrderResponse) }) _sym_db.RegisterMessage(PlaceOrderResponse) CacheSizeResponse = _reflection.GeneratedProtocolMessageType('CacheSizeResponse', (_message.Message,), { 'DESCRIPTOR' : _CACHESIZERESPONSE, '__module__' : 'demo_pb2' # @@protoc_insertion_point(class_scope:msdemo.CacheSizeResponse) }) _sym_db.RegisterMessage(CacheSizeResponse) AdRequest = _reflection.GeneratedProtocolMessageType('AdRequest', (_message.Message,), { 'DESCRIPTOR' : _ADREQUEST, '__module__' : 'demo_pb2' # @@protoc_insertion_point(class_scope:msdemo.AdRequest) }) _sym_db.RegisterMessage(AdRequest) AdResponse = _reflection.GeneratedProtocolMessageType('AdResponse', (_message.Message,), { 'DESCRIPTOR' : _ADRESPONSE, '__module__' : 'demo_pb2' # @@protoc_insertion_point(class_scope:msdemo.AdResponse) }) _sym_db.RegisterMessage(AdResponse) Ad = _reflection.GeneratedProtocolMessageType('Ad', (_message.Message,), { 'DESCRIPTOR' : _AD, '__module__' : 'demo_pb2' # @@protoc_insertion_point(class_scope:msdemo.Ad) }) _sym_db.RegisterMessage(Ad) DESCRIPTOR._options = None _CARTSERVICE = _descriptor.ServiceDescriptor( name='CartService', full_name='msdemo.CartService', file=DESCRIPTOR, index=0, serialized_options=None, create_key=_descriptor._internal_create_key, serialized_start=2271, serialized_end=2443, methods=[ _descriptor.MethodDescriptor( name='AddItem', full_name='msdemo.CartService.AddItem', index=0, containing_service=None, input_type=_ADDITEMREQUEST, output_type=_EMPTY, serialized_options=None, 
create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name='GetCart', full_name='msdemo.CartService.GetCart', index=1, containing_service=None, input_type=_GETCARTREQUEST, output_type=_CART, serialized_options=None, create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name='EmptyCart', full_name='msdemo.CartService.EmptyCart', index=2, containing_service=None, input_type=_EMPTYCARTREQUEST, output_type=_EMPTY, serialized_options=None, create_key=_descriptor._internal_create_key, ), ]) _sym_db.RegisterServiceDescriptor(_CARTSERVICE) DESCRIPTOR.services_by_name['CartService'] = _CARTSERVICE _RECOMMENDATIONSERVICE = _descriptor.ServiceDescriptor( name='RecommendationService', full_name='msdemo.RecommendationService', file=DESCRIPTOR, index=1, serialized_options=None, create_key=_descriptor._internal_create_key, serialized_start=2445, serialized_end=2566, methods=[ _descriptor.MethodDescriptor( name='ListRecommendations', full_name='msdemo.RecommendationService.ListRecommendations', index=0, containing_service=None, input_type=_LISTRECOMMENDATIONSREQUEST, output_type=_LISTRECOMMENDATIONSRESPONSE, serialized_options=None, create_key=_descriptor._internal_create_key, ), ]) _sym_db.RegisterServiceDescriptor(_RECOMMENDATIONSERVICE) DESCRIPTOR.services_by_name['RecommendationService'] = _RECOMMENDATIONSERVICE _PRODUCTCATALOGSERVICE = _descriptor.ServiceDescriptor( name='ProductCatalogService', full_name='msdemo.ProductCatalogService', file=DESCRIPTOR, index=2, serialized_options=None, create_key=_descriptor._internal_create_key, serialized_start=2569, serialized_end=2798, methods=[ _descriptor.MethodDescriptor( name='ListProducts', full_name='msdemo.ProductCatalogService.ListProducts', index=0, containing_service=None, input_type=_EMPTY, output_type=_LISTPRODUCTSRESPONSE, serialized_options=None, create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name='GetProduct', 
full_name='msdemo.ProductCatalogService.GetProduct', index=1, containing_service=None, input_type=_GETPRODUCTREQUEST, output_type=_PRODUCT, serialized_options=None, create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name='SearchProducts', full_name='msdemo.ProductCatalogService.SearchProducts', index=2, containing_service=None, input_type=_SEARCHPRODUCTSREQUEST, output_type=_SEARCHPRODUCTSRESPONSE, serialized_options=None, create_key=_descriptor._internal_create_key, ), ]) _sym_db.RegisterServiceDescriptor(_PRODUCTCATALOGSERVICE) DESCRIPTOR.services_by_name['ProductCatalogService'] = _PRODUCTCATALOGSERVICE _SHIPPINGSERVICE = _descriptor.ServiceDescriptor( name='ShippingService', full_name='msdemo.ShippingService', file=DESCRIPTOR, index=3, serialized_options=None, create_key=_descriptor._internal_create_key, serialized_start=2801, serialized_end=2951, methods=[ _descriptor.MethodDescriptor( name='GetQuote', full_name='msdemo.ShippingService.GetQuote', index=0, containing_service=None, input_type=_GETQUOTEREQUEST, output_type=_GETQUOTERESPONSE, serialized_options=None, create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name='ShipOrder', full_name='msdemo.ShippingService.ShipOrder', index=1, containing_service=None, input_type=_SHIPORDERREQUEST, output_type=_SHIPORDERRESPONSE, serialized_options=None, create_key=_descriptor._internal_create_key, ), ]) _sym_db.RegisterServiceDescriptor(_SHIPPINGSERVICE) DESCRIPTOR.services_by_name['ShippingService'] = _SHIPPINGSERVICE _CURRENCYSERVICE = _descriptor.ServiceDescriptor( name='CurrencyService', full_name='msdemo.CurrencyService', file=DESCRIPTOR, index=4, serialized_options=None, create_key=_descriptor._internal_create_key, serialized_start=2954, serialized_end=3117, methods=[ _descriptor.MethodDescriptor( name='GetSupportedCurrencies', full_name='msdemo.CurrencyService.GetSupportedCurrencies', index=0, containing_service=None, input_type=_EMPTY, 
output_type=_GETSUPPORTEDCURRENCIESRESPONSE, serialized_options=None, create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name='Convert', full_name='msdemo.CurrencyService.Convert', index=1, containing_service=None, input_type=_CURRENCYCONVERSIONREQUEST, output_type=_MONEY, serialized_options=None, create_key=_descriptor._internal_create_key, ), ]) _sym_db.RegisterServiceDescriptor(_CURRENCYSERVICE) DESCRIPTOR.services_by_name['CurrencyService'] = _CURRENCYSERVICE _PAYMENTSERVICE = _descriptor.ServiceDescriptor( name='PaymentService', full_name='msdemo.PaymentService', file=DESCRIPTOR, index=5, serialized_options=None, create_key=_descriptor._internal_create_key, serialized_start=3119, serialized_end=3194, methods=[ _descriptor.MethodDescriptor( name='Charge', full_name='msdemo.PaymentService.Charge', index=0, containing_service=None, input_type=_CHARGEREQUEST, output_type=_CHARGERESPONSE, serialized_options=None, create_key=_descriptor._internal_create_key, ), ]) _sym_db.RegisterServiceDescriptor(_PAYMENTSERVICE) DESCRIPTOR.services_by_name['PaymentService'] = _PAYMENTSERVICE _EMAILSERVICE = _descriptor.ServiceDescriptor( name='EmailService', full_name='msdemo.EmailService', file=DESCRIPTOR, index=6, serialized_options=None, create_key=_descriptor._internal_create_key, serialized_start=3196, serialized_end=3290, methods=[ _descriptor.MethodDescriptor( name='SendOrderConfirmation', full_name='msdemo.EmailService.SendOrderConfirmation', index=0, containing_service=None, input_type=_SENDORDERCONFIRMATIONREQUEST, output_type=_EMPTY, serialized_options=None, create_key=_descriptor._internal_create_key, ), ]) _sym_db.RegisterServiceDescriptor(_EMAILSERVICE) DESCRIPTOR.services_by_name['EmailService'] = _EMAILSERVICE _CHECKOUTSERVICE = _descriptor.ServiceDescriptor( name='CheckoutService', full_name='msdemo.CheckoutService', file=DESCRIPTOR, index=7, serialized_options=None, create_key=_descriptor._internal_create_key, serialized_start=3293, 
serialized_end=3441, methods=[ _descriptor.MethodDescriptor( name='PlaceOrder', full_name='msdemo.CheckoutService.PlaceOrder', index=0, containing_service=None, input_type=_PLACEORDERREQUEST, output_type=_PLACEORDERRESPONSE, serialized_options=None, create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name='GetCacheSize', full_name='msdemo.CheckoutService.GetCacheSize', index=1, containing_service=None, input_type=_EMPTY, output_type=_CACHESIZERESPONSE, serialized_options=None, create_key=_descriptor._internal_create_key, ), ]) _sym_db.RegisterServiceDescriptor(_CHECKOUTSERVICE) DESCRIPTOR.services_by_name['CheckoutService'] = _CHECKOUTSERVICE _ADSERVICE = _descriptor.ServiceDescriptor( name='AdService', full_name='msdemo.AdService', file=DESCRIPTOR, index=8, serialized_options=None, create_key=_descriptor._internal_create_key, serialized_start=3443, serialized_end=3505, methods=[ _descriptor.MethodDescriptor( name='GetAds', full_name='msdemo.AdService.GetAds', index=0, containing_service=None, input_type=_ADREQUEST, output_type=_ADRESPONSE, serialized_options=None, create_key=_descriptor._internal_create_key, ), ]) _sym_db.RegisterServiceDescriptor(_ADSERVICE) DESCRIPTOR.services_by_name['AdService'] = _ADSERVICE # @@protoc_insertion_point(module_scope)
36.735155
5,787
0.759523
8,167
69,907
6.156116
0.055712
0.044394
0.061977
0.066591
0.625833
0.593969
0.584462
0.557432
0.550769
0.514351
0
0.032827
0.118887
69,907
1,902
5,788
36.754469
0.783411
0.031685
0
0.670527
1
0.00579
0.180284
0.124939
0
0
0
0
0
1
0
false
0
0.002316
0
0.002316
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
baa4429e9cb6ca8595e022150f7358a148a19476
720
py
Python
main.py
BrunoASNascimento/cemaden_api
28edb55597e2d97282787c14f59811b1efc67849
[ "MIT" ]
null
null
null
main.py
BrunoASNascimento/cemaden_api
28edb55597e2d97282787c14f59811b1efc67849
[ "MIT" ]
null
null
null
main.py
BrunoASNascimento/cemaden_api
28edb55597e2d97282787c14f59811b1efc67849
[ "MIT" ]
null
null
null
import pandas as pd def get_state(latitude: float, longitude: float): df_info = pd.read_csv('data/st_city.csv') df_info['control_lat'] = abs(df_info['latitude']-latitude) df_info['control_lon'] = abs(df_info['longitude']-longitude) df_info['control_distance'] = abs( df_info['control_lat']+df_info['control_lon']) control_distance_min = df_info['control_distance'].min() print(df_info.nsmallest(5, 'control_distance')) print(df_info['control_distance'].min()) uf = ( df_info[df_info['control_distance'] == control_distance_min]['uf'].values[0] ) return uf #! Only test # latitude = -23.46 # longitude = -46.2 # get_state(latitude, longitude)
27.692308
64
0.666667
99
720
4.545455
0.353535
0.173333
0.231111
0.186667
0.106667
0
0
0
0
0
0
0.015228
0.179167
720
25
65
28.8
0.746193
0.108333
0
0
0
0
0.249216
0
0
0
0
0
0
1
0.066667
false
0
0.066667
0
0.2
0.133333
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
babedb1f41308c50e7315f5d98590214fd9d5729
377
py
Python
cooking_assistant/models.py
rclsilver/cooking-assistant
440f762977d5517f152b8d4b520c590e2cf0eca6
[ "Apache-2.0" ]
1
2020-12-01T16:12:28.000Z
2020-12-01T16:12:28.000Z
cooking_assistant/models.py
rclsilver/cooking-assistant
440f762977d5517f152b8d4b520c590e2cf0eca6
[ "Apache-2.0" ]
null
null
null
cooking_assistant/models.py
rclsilver/cooking-assistant
440f762977d5517f152b8d4b520c590e2cf0eca6
[ "Apache-2.0" ]
null
null
null
from django.db import models from uuid import uuid4 class Base(models.Model): id = models.UUIDField(primary_key=True, default=uuid4, editable=False) created_at = models.DateTimeField(auto_now_add=True) updated_at = models.DateTimeField(auto_now=True) @property def is_new(self): return self._state.adding class Meta: abstract = True
23.5625
74
0.71618
51
377
5.137255
0.686275
0.061069
0.160305
0.19084
0.21374
0
0
0
0
0
0
0.006623
0.198939
377
15
75
25.133333
0.860927
0
0
0
0
0
0
0
0
0
0
0
0
1
0.090909
false
0
0.181818
0.090909
0.818182
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
babfdeef87b9bb4e043a3c67f42ef27cfc5cd7dc
236
py
Python
1_PythonDataProcessing/7_05_datapreprocess_dropna.py
hnwarid/DQLabAcademy
e03d82f97536ae103b6abc65db0ae16520fb68c7
[ "MIT" ]
null
null
null
1_PythonDataProcessing/7_05_datapreprocess_dropna.py
hnwarid/DQLabAcademy
e03d82f97536ae103b6abc65db0ae16520fb68c7
[ "MIT" ]
null
null
null
1_PythonDataProcessing/7_05_datapreprocess_dropna.py
hnwarid/DQLabAcademy
e03d82f97536ae103b6abc65db0ae16520fb68c7
[ "MIT" ]
null
null
null
import pandas as pd dataset = pd.read_csv('https://storage.googleapis.com/dqlab-dataset/pythonTutorial/online_raw.csv') #Drop rows with missing value dataset_clean = dataset.dropna() print('Ukuran dataset_clean:', dataset_clean.shape)
39.333333
99
0.800847
34
236
5.411765
0.735294
0.195652
0.206522
0
0
0
0
0
0
0
0
0
0.080508
236
6
100
39.333333
0.847926
0.118644
0
0
0
0
0.456731
0
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0.25
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
bac40791f2c80a3e02b5ff306d754d922055578f
2,077
py
Python
application/src/database_manager/authenticate_user.py
eyardley/CSC648-SoftwareEngineering-Snapster
6dbe1cf9b34de6d6dbc6be75db3a34583f67c01a
[ "MIT" ]
null
null
null
application/src/database_manager/authenticate_user.py
eyardley/CSC648-SoftwareEngineering-Snapster
6dbe1cf9b34de6d6dbc6be75db3a34583f67c01a
[ "MIT" ]
3
2021-06-08T21:39:12.000Z
2022-01-13T02:46:20.000Z
application/src/database_manager/authenticate_user.py
eyardley/CSC648-SoftwareEngineering-Snapster
6dbe1cf9b34de6d6dbc6be75db3a34583f67c01a
[ "MIT" ]
1
2021-05-09T21:01:28.000Z
2021-05-09T21:01:28.000Z
import bcrypt import base64 import hashlib import mysql.connector def authenticate_user(username, password_plain, db): user_authentication_query = ("SELECT user_id, username, password FROM user WHERE username=%s") # query the database for the record of the user try: db.query(user_authentication_query, (username,)) except mysql.connector.Error as err: # print("Internal server error with database: {}".format(err)) # FIXME: log this potentially fatal error # TODO: what other errors could occur with the connection object? error_message = { 'status': 'database_error', 'message': 'Internal database error: {}'.format(err) } return error_message if db.get_row_count() == 0: # the user entered the incorrect username success_message = { 'status': 'success', 'login': 'failed', } # print('Unsuccessful login attempt by user {}'.format(user_id)) return success_message # examine the returned record to validate the user entered credentials for (user_id, username, password) in db.fetchall(): password_entered = password_plain.encode('utf-8') # encode the plain text password entered by the user # apply bcrypt on entered password and compare with value in database if bcrypt.checkpw(base64.b64encode(hashlib.sha256(password_entered).digest()), password.encode('utf-8')): # password is a match, login was successful # success_message = generate_session(user_id, r) success_message = { 'status': 'success', 'login': 'success', 'user_id': user_id } return success_message else: # the user entered the incorrect password success_message = { 'status': 'success', 'login': 'failed', } # print('Unsuccessful login attempt by user {}'.format(user_id)) return success_message
37.089286
113
0.615792
230
2,077
5.434783
0.404348
0.0336
0.0336
0.0648
0.256
0.168
0.168
0.168
0.168
0.168
0
0.008197
0.295137
2,077
55
114
37.763636
0.845628
0.333654
0
0.314286
0
0
0.150365
0
0
0
0
0.018182
0
1
0.028571
false
0.142857
0.114286
0
0.257143
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
1
0
0
0
0
0
2
bac68136c3138079f55b42eb2bae847e2e99a48e
96
py
Python
lejian/const.py
PuZheng/LEJAIN-backend
1647b63cb409842566f3d2cd9771f8b8856c1a03
[ "MIT" ]
null
null
null
lejian/const.py
PuZheng/LEJAIN-backend
1647b63cb409842566f3d2cd9771f8b8856c1a03
[ "MIT" ]
13
2015-10-23T04:43:51.000Z
2015-12-19T14:30:33.000Z
lejian/const.py
PuZheng/lejian-backend
1647b63cb409842566f3d2cd9771f8b8856c1a03
[ "MIT" ]
null
null
null
# -*- coding: UTF-8 -*- VENDOR_GROUP = 1 CUSTOMER_GROUP = 2 RETAILER_GROUP = 3 SUPER_ADMIN = 4
13.714286
23
0.677083
15
96
4.066667
0.866667
0
0
0
0
0
0
0
0
0
0
0.064103
0.1875
96
6
24
16
0.717949
0.21875
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
bacbee1c69cf258bcc495209c9b24e8f73c47fe9
63
py
Python
frontends/mrxl/mrxl/__init__.py
pbonh/calyx
08e49109292d151fb5b229c304da4431477223d7
[ "MIT" ]
109
2021-02-14T23:08:05.000Z
2022-03-25T23:50:58.000Z
frontends/mrxl/mrxl/__init__.py
yzh119/calyx
2eedbe85cdff3481ea1dcf772ea7cc9bacd2be77
[ "MIT" ]
432
2021-02-05T19:10:25.000Z
2022-03-30T19:03:50.000Z
frontends/mrxl/mrxl/__init__.py
yzh119/calyx
2eedbe85cdff3481ea1dcf772ea7cc9bacd2be77
[ "MIT" ]
21
2021-02-14T23:27:52.000Z
2022-02-03T06:22:43.000Z
"""The MrXL compiler and interpreter""" __version__ = "0.1.0"
15.75
39
0.68254
9
63
4.333333
0.888889
0
0
0
0
0
0
0
0
0
0
0.055556
0.142857
63
3
40
21
0.666667
0.52381
0
0
0
0
0.208333
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
bad48eb855d76a5d1fd080cfe34d0a3239630e97
1,408
py
Python
djaio/core/exceptions.py
domclick/djaio
f69f70e575f72fd3679ecf7fdb53f4531d20a590
[ "Apache-2.0" ]
5
2019-02-19T10:36:52.000Z
2021-02-19T22:58:21.000Z
djaio/core/exceptions.py
resslerruntime/djaio
f69f70e575f72fd3679ecf7fdb53f4531d20a590
[ "Apache-2.0" ]
2
2021-02-26T02:12:45.000Z
2022-01-21T18:56:26.000Z
djaio/core/exceptions.py
resslerruntime/djaio
f69f70e575f72fd3679ecf7fdb53f4531d20a590
[ "Apache-2.0" ]
1
2021-12-11T03:26:30.000Z
2021-12-11T03:26:30.000Z
# -*- coding: utf-8 -*- class BaseApiException(Exception): status_code = None message = None detail = None def to_dict(self): _output = { 'code': self.status_code, 'message': self.message } if self.detail: _output.update({'detail': self.detail}) return _output class ObjectAlreadyExistException(BaseApiException): status_code = 409 message = 'Object with provided data already exist' class ObjectNotFoundException(BaseApiException): # ToDo rewrite _id to key in next release status_code = 404 message = 'Object with KEY %s not found' def __init__(self, _id, *args, **kwargs): self.message = self.message % _id super().__init__(args, kwargs) class BadRequestException(BaseApiException): status_code = 400 def __init__(self, *args, **kwargs): self.message = kwargs.get('message') super().__init__(args, kwargs) class UnauthorizedException(BaseApiException): status_code = 401 message = 'Can not authorized with provided data' class ForbiddenException(BaseApiException): status_code = 403 def __init__(self, *args, **kwargs): self.message = kwargs.get('message', 'forbidden') super().__init__(args, kwargs) class TooManyRequestsException(BaseApiException): status_code = 429 message = 'too many requests'
24.275862
57
0.659091
148
1,408
6.006757
0.391892
0.089989
0.146232
0.070866
0.188976
0.107987
0.107987
0.107987
0.107987
0.107987
0
0.01774
0.239347
1,408
57
58
24.701754
0.812325
0.043324
0
0.135135
0
0
0.119792
0
0
0
0
0.017544
0
1
0.108108
false
0
0
0
0.675676
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
1
0
0
2
baf9c9ef7ebf815fe5507e71e14cc50fe05c9189
1,613
py
Python
config.sample.py
babyoumine/gulag
9a317c72b16058b998ee137624bfab54cfc2ef7c
[ "MIT" ]
null
null
null
config.sample.py
babyoumine/gulag
9a317c72b16058b998ee137624bfab54cfc2ef7c
[ "MIT" ]
null
null
null
config.sample.py
babyoumine/gulag
9a317c72b16058b998ee137624bfab54cfc2ef7c
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """ server settings """ # The address which the server runs on. # The server supports both INET4 and UNIX sockets. # For INET sockets, set to (addr: str, port: int), # For UNIX sockets, set to the path of the socket. server_addr = '/tmp/gulag.sock' # The max amount of concurrent # connections gulag will hold. max_conns = 16 # Displays additional information in the # console, generally for debugging purposes. debug = False # Whether the server is running in 'production mode'. # Having this as false will disable some features that # aren't used during testing. server_build = True # Your MySQL authentication info. # XXX: we may switch to postgres in the future.. mysql = { 'db': 'gulag', 'host': 'localhost', 'password': 'supersecure', 'user': 'cmyui' } # Your osu!api key. This is required for fetching # many things, such as beatmap information! osu_api_key = '' """ osu!direct """ # TODO: add max size to cache on disk. # perhaps could even make a system to track # the most commonly downloaded maps to cache? # Whether you'd like gulag to cache maps on disk. # gulag will still use an external mirror for new # downloads, but will keep a cache of osz files # for ultra speedy downloads. mirror = True # The URL of an external mirror # to use for non-cached maps. external_mirror = 'https://osu.gatari.pw' """ customization """ # The menu icon displayed on # the main menu of osu! ingame. menu_icon = ( 'https://link.to/my_image.png', # image url 'https://github.com/cmyui/gulag' # onclick url ) # Ingame bot command prefix. command_prefix = '!'
26.442623
54
0.706758
245
1,613
4.616327
0.604082
0.023873
0.02122
0
0
0
0
0
0
0
0
0.00306
0.189709
1,613
60
55
26.883333
0.86228
0.696218
0
0
0
0
0.344578
0
0
0
0
0.016667
0
1
0
false
0.055556
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
1
0
0
0
0
0
2
24236cf60f73fd9dadc9482ba8dc12905ed6c3e2
1,344
py
Python
qermit/__init__.py
CQCL/qermit
931e8ee3bfcf4497f4be9e9278a9544d3136e67c
[ "Apache-2.0" ]
11
2021-05-28T00:20:42.000Z
2022-02-05T14:14:27.000Z
qermit/__init__.py
CQCL/qermit
931e8ee3bfcf4497f4be9e9278a9544d3136e67c
[ "Apache-2.0" ]
4
2021-10-08T10:13:13.000Z
2022-03-27T20:23:07.000Z
qermit/__init__.py
CQCL/qermit
931e8ee3bfcf4497f4be9e9278a9544d3136e67c
[ "Apache-2.0" ]
3
2021-06-11T10:04:51.000Z
2022-01-12T19:19:22.000Z
# Copyright 2019-2021 Cambridge Quantum Computing # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ qermit provides tools for running and composing error-mitigation methods. Error mitigation methods are split into two types, those which in some manner modify the set of shots returned when running quantum circuits on quantum devices (MitRes), and those which modify the expectation value of some observable (MitEx). """ from qermit.taskgraph.task_graph import TaskGraph from qermit.taskgraph.mittask import ( MitTask, AnsatzCircuit, CircuitShots, ObservableExperiment, ) from qermit.taskgraph.mitres import MitRes from qermit.taskgraph.mitex import MitEx from qermit.taskgraph.utils import SymbolsDict, MeasurementCircuit, ObservableTracker __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
39.529412
92
0.786458
185
1,344
5.616216
0.610811
0.057748
0.091434
0.030799
0
0
0
0
0
0
0
0.010536
0.15253
1,344
33
93
40.727273
0.901668
0.671131
0
0
0
0
0.016627
0
0
0
0
0
0
1
0
false
0
0.545455
0
0.545455
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
24251b787d901524feb0ae346eaaeaac8b494b50
907
py
Python
python/lamda Anonymous Function.py
itzsoumyadip/vs
acf32cd0bacb26e62854060e0acf5eb41b7a68c8
[ "Unlicense" ]
1
2019-07-05T04:27:05.000Z
2019-07-05T04:27:05.000Z
python/lamda Anonymous Function.py
itzsoumyadip/vs
acf32cd0bacb26e62854060e0acf5eb41b7a68c8
[ "Unlicense" ]
null
null
null
python/lamda Anonymous Function.py
itzsoumyadip/vs
acf32cd0bacb26e62854060e0acf5eb41b7a68c8
[ "Unlicense" ]
null
null
null
# FUNCTION IN PYTHON IS FUNCTION #lamda is a name less function f=lambda a,b: a+b # it only evaluate single expression result=f(5,6) print(result) ## comparison between normal function and lambda ## normal function method nums =[6,8,2,4,5,7,3,9,21,45,98,36,57,78,73,91,19,85,93] def is_even(n): return n%2==0 print(list(filter(is_even,nums))) #[6, 8, 2, 4, 98, 36, 78] ## normal fuction call for reduce example below ## by lambda print(list(filter(lambda n:n%2==0 ,nums))) #[6, 8, 2, 4, 98, 36, 78] ## doubles the value by map print(list(map(lambda n:n*2,nums))) #[12, 16, 4, 8, 10, 14, 6, 18, 42, 90, 196, 72, 114, 156, 146, 182, 38, 170, 186] ## add all value of the list by use of reduce from functools import reduce print(reduce(lambda a,b: a+b,nums)) ## normal fuction way for reduce def add(a,b): return a+b print(type(nums)) print(reduce(add,nums))
18.510204
81
0.648291
176
907
3.329545
0.488636
0.020478
0.030717
0.035836
0.095563
0.047782
0.047782
0.047782
0
0
0
0.135616
0.195149
907
48
82
18.895833
0.667123
0.495039
0
0
0
0
0
0
0
0
0
0
0
1
0.133333
false
0
0.066667
0.133333
0.333333
0.466667
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
0
1
0
2
2440ad706a5bb2c27050415b20f3edc5eb61711a
777
py
Python
tests/validators/test_genomic_duplication.py
cancervariants/varlex
3806317fa0125c3098e80124d5169fe6a13d50db
[ "MIT" ]
null
null
null
tests/validators/test_genomic_duplication.py
cancervariants/varlex
3806317fa0125c3098e80124d5169fe6a13d50db
[ "MIT" ]
15
2019-10-23T17:35:42.000Z
2020-05-05T21:04:01.000Z
tests/validators/test_genomic_duplication.py
cancervariants/varlex
3806317fa0125c3098e80124d5169fe6a13d50db
[ "MIT" ]
null
null
null
"""Module for testing Genomic Duplication Validator.""" import unittest from variation.validators import GenomicDuplication from variation.classifiers import GenomicDuplicationClassifier from .validator_base import ValidatorBase class TestGenomicDuplicationValidator(ValidatorBase, unittest.TestCase): """A class to test the Genomic Duplication Validator.""" def validator_instance(self): """Return genomic duplication instance.""" return GenomicDuplication(*self.params) def classifier_instance(self): """Return the genomic duplication classifier instance.""" return GenomicDuplicationClassifier() def fixture_name(self): """Return the fixture name for genomic duplication.""" return "genomic_duplication"
33.782609
72
0.75547
74
777
7.864865
0.418919
0.185567
0.092784
0
0
0
0
0
0
0
0
0
0.16731
777
22
73
35.318182
0.899536
0.306306
0
0
0
0
0.037037
0
0
0
0
0
0
1
0.272727
false
0
0.363636
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
0
0
0
2
244c25ced398e280ac5400da1c7c6cefbee36d50
929
py
Python
contactus/views.py
neyona/underwaterfortune-2021-version
128f9359a3b6fb9fd6e3f39785f0eb9928dbb265
[ "MIT" ]
null
null
null
contactus/views.py
neyona/underwaterfortune-2021-version
128f9359a3b6fb9fd6e3f39785f0eb9928dbb265
[ "MIT" ]
null
null
null
contactus/views.py
neyona/underwaterfortune-2021-version
128f9359a3b6fb9fd6e3f39785f0eb9928dbb265
[ "MIT" ]
null
null
null
# frontend_docker/contactus/views.py
from rest_framework import generics, permissions
from rest_framework.parsers import JSONParser
from rest_framework.response import Response
from rest_framework import status
from rest_framework_api_key.models import APIKey
from rest_framework_api_key.permissions import HasAPIKey
from django.views.decorators.csrf import ensure_csrf_cookie

from .models import Contact
from .serializers import ContactSerializer


class ContactListCreate(generics.ListCreateAPIView):
    """List and create contact messages posted from the frontend form.

    Requests are allowed when they carry an API key header, or when the
    caller is an authenticated user.
    """

    queryset = Contact.objects.all()
    serializer_class = ContactSerializer
    parser_classes = [JSONParser]
    # Same composed permission as before; the redundant outer parentheses
    # around the list were dropped.
    permission_classes = [HasAPIKey | permissions.IsAuthenticated]


@ensure_csrf_cookie
def contact_list(request):
    """Echo the posted payload back (and ensure the CSRF cookie is set)."""
    return Response({"message": "Got some data!", "data": request.data})
35.730769
76
0.805167
115
929
6.33913
0.530435
0.065844
0.139918
0.0631
0.0631
0
0
0
0
0
0
0
0.133477
929
25
77
37.16
0.90559
0.1507
0
0
0
0
0.031847
0
0
0
0
0
0
1
0.058824
false
0
0.529412
0.058824
0.941176
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
2459162827ada17e369c07b78ea8d4477b85e147
2,529
py
Python
Apps/CesiumTopo/testcreate_geojson_polygon_from_viz_json.py
ungentilgarcon/cesiumviz
dd4c2bccde05e4f1272248cc0cedc075cdbeed96
[ "Apache-2.0" ]
null
null
null
Apps/CesiumTopo/testcreate_geojson_polygon_from_viz_json.py
ungentilgarcon/cesiumviz
dd4c2bccde05e4f1272248cc0cedc075cdbeed96
[ "Apache-2.0" ]
null
null
null
Apps/CesiumTopo/testcreate_geojson_polygon_from_viz_json.py
ungentilgarcon/cesiumviz
dd4c2bccde05e4f1272248cc0cedc075cdbeed96
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*-
"""Generate a GeoJSON FeatureCollection of square polygons from a venue list.

Each row of ``exportbands_all_venues.json`` provides ``longitude``,
``latitude`` and ``count``; a square centred on the venue with half-size
``count / WIDTH`` becomes one Feature.  (The original notes about n-sided
polygons built from cos/sin are future work and are unchanged in spirit.)

Fixes over the original script:
- Python 2 ``print`` statements converted to Python 3 calls (the original
  would not even parse under Python 3).
- Input and output files are closed deterministically via ``with``.
- The FeatureCollection is assembled in memory and dumped once, instead of
  hand-writing braces and commas around repeated ``json.dump`` calls.
- The huge repeated corner expression is factored into ``_square``.
"""
import json

from geojson import Feature, FeatureCollection, Polygon

# Scale factor turning a venue's gig count into a polygon half-size.
WIDTH = 1000
# Venues with a count at or below this threshold are skipped.
MIN_NUMBER_GIG = 1


def _square(lon, lat, half):
    """Return a closed square ring of half-size ``half`` around (lon, lat)."""
    return Polygon([[
        (lon - half, lat + half),
        (lon - half, lat - half),
        (lon + half, lat - half),
        (lon + half, lat + half),
        (lon - half, lat + half),
    ]])


def main():
    """Read the venue JSON and append one square Feature per busy venue."""
    with open('exportbands_all_venues.json') as src:
        data = json.load(src)

    features = []
    for row in data:
        # NOTE(review): the original compared row['count'] directly; this
        # assumes the JSON stores it as a number — confirm with the producer.
        if row['count'] > MIN_NUMBER_GIG:
            half = float(row['count']) / WIDTH
            poly = _square(row['longitude'], row['latitude'], half)
            features.append(Feature(geometry=poly, properties={"type": "Salle"}))

    print(len(features))  # number of venues kept (was ``print nb_rec``)
    # 'a+' kept from the original: output is appended, not overwritten.
    with open('exportbands_all_venues.geojson', 'a+') as out:
        json.dump(FeatureCollection(features), out)


if __name__ == '__main__':
    main()
24.553398
109
0.614472
389
2,529
3.922879
0.349614
0.068152
0.093709
0.129751
0.374181
0.326999
0.299476
0.206422
0.206422
0.206422
0
0.041867
0.178331
2,529
103
110
24.553398
0.692493
0.349545
0
0.46
1
0
0.151355
0.037674
0
0
0
0.009709
0
0
null
null
0
0.06
null
null
0.12
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
246a8c72d45ed72ff1caf78b283902325ac64d34
2,281
py
Python
device/system.py
longyangking/Device
b8cf9fce5a8921dea179b0bb5a58a1a70dfb8f08
[ "MIT" ]
null
null
null
device/system.py
longyangking/Device
b8cf9fce5a8921dea179b0bb5a58a1a70dfb8f08
[ "MIT" ]
null
null
null
device/system.py
longyangking/Device
b8cf9fce5a8921dea179b0bb5a58a1a70dfb8f08
[ "MIT" ]
null
null
null
import numpy as np


class Point:
    """A lattice site with a (possibly time-dependent) on-site energy."""

    def __init__(self, position, onsite_func):
        self.position = position
        self.onsite_func = onsite_func

    def get_onsite(self, t):
        """Return the on-site energy at time ``t``."""
        return self.onsite_func(t)


class Link:
    """A coupling between two Points with two directed amplitudes."""

    def __init__(self, start_point, end_point, coupling_func):
        self.start_point = start_point
        self.end_point = end_point
        self.coupling_func = coupling_func

    def get_coupling(self, t):
        """Return the (start->end, end->start) amplitudes at time ``t``."""
        return self.coupling_func(t)

    def get_startpoint(self):
        return self.start_point

    def get_endpoint(self):
        return self.end_point


class System:
    """A set of Points and Links that can produce a Hamiltonian matrix."""

    def __init__(self, points=None, links=None):
        # Keep the caller's list objects (as the original did) so external
        # references observe add_point/add_link mutations.
        self.points = points if points is not None else list()
        self.links = links if links is not None else list()

    def add_point(self, point):
        self.points.append(point)

    def add_link(self, link):
        self.links.append(link)

    def get_hamiltonian(self, t):
        """Assemble the (n_sites x n_sites) Hamiltonian at time ``t``.

        Diagonal entries hold the on-site energies; each link writes its two
        directed coupling amplitudes onto the matching off-diagonal entries.
        """
        n_sites = len(self.points)
        hamiltonian = np.zeros((n_sites, n_sites))

        for idx, point in enumerate(self.points):
            hamiltonian[idx, idx] = point.get_onsite(t)

        for link in self.links:
            start = link.get_startpoint()
            end = link.get_endpoint()
            start_index = None
            end_index = None
            # Locate both endpoints in the site list (default equality,
            # i.e. object identity, just as before).
            for idx, point in enumerate(self.points):
                if point == start:
                    start_index = idx
                if point == end:
                    end_index = idx
                if (start_index is not None) and (end_index is not None):
                    break
            start2end, end2start = link.get_coupling(t)
            hamiltonian[start_index, end_index] = end2start
            hamiltonian[end_index, start_index] = start2end

        return hamiltonian

    def get_points(self):
        return self.points

    def get_n_sites(self):
        return len(self.points)

    def get_links(self):
        return self.links

    def __setitem__(self, name, value):
        pass

    def __getitem__(self, name):
        pass

    def __delitem__(self, name):
        pass
27.154762
73
0.588338
292
2,281
4.335616
0.181507
0.078989
0.044234
0.030806
0.047393
0
0
0
0
0
0
0.002596
0.324419
2,281
84
74
27.154762
0.818949
0
0
0.046875
0
0
0
0
0
0
0
0
0
1
0.25
false
0.046875
0.015625
0.109375
0.4375
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
79e47adf9e605ceb1b925fd413d7fd87a3a1a27b
393
py
Python
main.py
amorriscode/terminal-hero
321bc501e6ab7f6678c83fd48f016dcd5f943999
[ "MIT" ]
6
2020-04-11T15:54:02.000Z
2021-08-05T16:39:41.000Z
main.py
amorriscode/terminal-hero
321bc501e6ab7f6678c83fd48f016dcd5f943999
[ "MIT" ]
22
2020-04-12T00:17:30.000Z
2020-11-10T16:00:57.000Z
main.py
amorriscode/terminal-hero
321bc501e6ab7f6678c83fd48f016dcd5f943999
[ "MIT" ]
2
2020-06-27T00:42:05.000Z
2020-11-26T14:38:59.000Z
import sys

from asciimatics.exceptions import ResizeScreenError
from asciimatics.screen import Screen

from th.state import GameState
from th.screens import stage

if __name__ == "__main__":
    game_state = GameState()

    # Re-enter the screen wrapper whenever the terminal is resized; a normal
    # return from the wrapper exits the process.
    while True:
        try:
            Screen.wrapper(stage, catch_interrupt=False, arguments=[game_state])
            # BUG FIX: `sys` was used here without ever being imported,
            # which raised NameError instead of exiting cleanly.
            sys.exit(0)
        except ResizeScreenError:
            pass
28.071429
80
0.692112
44
393
5.931818
0.636364
0.114943
0
0
0
0
0
0
0
0
0
0.003344
0.239186
393
13
81
30.230769
0.869565
0
0
0
0
0
0.020356
0
0
0
0
0
0
1
0
false
0.083333
0.333333
0
0.333333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
1
0
0
0
0
2
79e82a3667e4ad2584a011477c76a6e4712679f5
366
py
Python
exercise/week4/ex2.py
jamestiotio/DW2020
a8c575f1afd34d66dce2798a11b2fa15d1214a43
[ "MIT" ]
null
null
null
exercise/week4/ex2.py
jamestiotio/DW2020
a8c575f1afd34d66dce2798a11b2fa15d1214a43
[ "MIT" ]
1
2021-11-04T16:30:26.000Z
2021-11-04T16:30:26.000Z
exercise/week4/ex2.py
jamestiotio/DW2020
a8c575f1afd34d66dce2798a11b2fa15d1214a43
[ "MIT" ]
null
null
null
def interlock(word1, word2, word3):
    """Return True when interleaving word1 and word2 yields word3.

    Characters alternate (word1 first) for the length of the shorter word;
    the remaining tail of the longer word is appended afterwards.  Any
    empty input word yields False.
    """
    if not (word1 and word2 and word3):
        return False

    woven = "".join(a + b for a, b in zip(word1, word2))
    cut = min(len(word1), len(word2))
    # On equal lengths the tail is empty, matching the original's tie rule.
    tail = (word1 if len(word1) >= len(word2) else word2)[cut:]
    return word3 == woven + tail
28.153846
97
0.587432
50
366
4.3
0.4
0.186047
0.181395
0.223256
0.204651
0.204651
0
0
0
0
0
0.054348
0.245902
366
13
98
28.153846
0.724638
0
0
0.222222
0
0
0
0
0
0
0
0
0
1
0.111111
false
0
0
0
0.444444
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
79ea7143ab7b88de5f968a964021b1d5e60af006
1,245
py
Python
scripts/twice.py
Ryo0927/robosys_ros
563abf2a87422521ba56a2d74ce1ce0a8eefc50e
[ "BSD-3-Clause" ]
null
null
null
scripts/twice.py
Ryo0927/robosys_ros
563abf2a87422521ba56a2d74ce1ce0a8eefc50e
[ "BSD-3-Clause" ]
null
null
null
scripts/twice.py
Ryo0927/robosys_ros
563abf2a87422521ba56a2d74ce1ce0a8eefc50e
[ "BSD-3-Clause" ]
null
null
null
#!/usr/bin/env python3
import rospy
from std_msgs.msg import Int32

zodiac_num = 0
print("卯")

# (divisor, sign) pairs checked from 12 down to 1; the first divisor that
# evenly divides the reduced year offset selects the sign to print.  This
# reproduces the original if/elif chain exactly, including its order.
_SIGNS = (
    (12, "亥"), (11, "戌"), (10, "酉"), (9, "申"), (8, "未"), (7, "午"),
    (6, "巳"), (5, "辰"), (4, "卯"), (3, "寅"), (2, "丑"), (1, "子"),
)


def cb(message):
    """Reduce the incoming count (offset from 1995) and print a zodiac sign."""
    global zodiac_num
    zodiac_num = message.data - 1995
    # Repeatedly subtract 12 until the value is at most 12 (same effect as
    # the original while loop with its redundant inner elif).
    while zodiac_num > 12:
        zodiac_num -= 12
    for divisor, sign in _SIGNS:
        if zodiac_num % divisor == 0:
            print(sign)
            break


if __name__ == '__main__':
    rospy.init_node('twice')
    sub = rospy.Subscriber('count_up', Int32, cb)
    pub = rospy.Publisher('twice', Int32, queue_size=1)
    rate = rospy.Rate(1)
    while not rospy.is_shutdown():
        pub.publish(zodiac_num)
        rate.sleep()
23.055556
55
0.539759
174
1,245
3.643678
0.385057
0.35489
0.246057
0.113565
0.091483
0.091483
0.091483
0
0
0
0
0.058542
0.327711
1,245
53
56
23.490566
0.698925
0.016867
0
0.044444
0
0
0.031889
0
0
0
0
0
0
1
0.022222
false
0
0.044444
0
0.066667
0.288889
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
79ebfa0f01e980e9e0acd802bf25313753f62802
778
py
Python
user_base/users/api/views/deleteacountview.py
jeremytrips/django_base_project
0be85a549d2910117aceb71634b03178c66caf98
[ "MIT" ]
null
null
null
user_base/users/api/views/deleteacountview.py
jeremytrips/django_base_project
0be85a549d2910117aceb71634b03178c66caf98
[ "MIT" ]
null
null
null
user_base/users/api/views/deleteacountview.py
jeremytrips/django_base_project
0be85a549d2910117aceb71634b03178c66caf98
[ "MIT" ]
1
2021-01-06T15:37:53.000Z
2021-01-06T15:37:53.000Z
from rest_framework.authtoken.views import ObtainAuthToken
from rest_framework.authtoken.models import Token
from rest_framework.permissions import IsAuthenticated, AllowAny
from rest_framework.response import Response
from rest_framework.status import HTTP_200_OK, HTTP_204_NO_CONTENT, HTTP_201_CREATED, HTTP_206_PARTIAL_CONTENT
from rest_framework.views import APIView

from django.contrib.auth import get_user_model

from users.permissions import IsEmailVerfied, IsAccountVisible


class DeleteAccount(APIView):
    """Soft-delete the requesting user's account.

    The account is deactivated (``is_active = False``) rather than removed,
    so the database row remains available.
    """

    permission_classes = [IsEmailVerfied, ]

    def post(self, request):
        """Deactivate the authenticated user and confirm with 200 OK."""
        account = request.user
        account.is_active = False
        account.save()
        return Response(data=["DELETED"], status=HTTP_200_OK)
35.363636
110
0.780206
97
778
6.030928
0.536082
0.082051
0.174359
0.088889
0
0
0
0
0
0
0
0.022831
0.155527
778
22
111
35.363636
0.86758
0.032134
0
0
0
0
0.009537
0
0
0
0
0
0
1
0.066667
false
0
0.533333
0
0.8
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
0307150f2f40ca0e3c31fcdc65e4a435d2f128b8
231
py
Python
nomenclateAPI/run.py
AndresMWeber/nomenclate-api
1a098918ce54bb6083bd001780de21b27a2669eb
[ "MIT" ]
null
null
null
nomenclateAPI/run.py
AndresMWeber/nomenclate-api
1a098918ce54bb6083bd001780de21b27a2669eb
[ "MIT" ]
null
null
null
nomenclateAPI/run.py
AndresMWeber/nomenclate-api
1a098918ce54bb6083bd001780de21b27a2669eb
[ "MIT" ]
null
null
null
from app import app
from db import db

# Bind the SQLAlchemy instance to the Flask app exactly once at import time.
db.init_app(app)


@app.before_first_request
def create_tables():
    """Create all database tables before the first request is served."""
    db.create_all()


if __name__ == '__main__':
    # BUG FIX: the original re-imported `db` and called `db.init_app(app)`
    # a second time inside this guard; the module-level call above already
    # performed the binding.
    app.run(port=5000, debug=True)
15.4
34
0.701299
39
231
3.794872
0.512821
0.162162
0.162162
0.189189
0.391892
0.391892
0.391892
0.391892
0.391892
0
0
0.02139
0.190476
231
15
34
15.4
0.770053
0
0
0.4
0
0
0.034483
0
0
0
0
0
0
1
0.1
true
0
0.3
0
0.4
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
2
03078ea2286b51a950394e75821c5c94423899ac
3,922
py
Python
pegasus/service/tests/test_users.py
nondejus/pegasus-service
01d421523d375153f1aae631718580572e78f1ec
[ "Apache-2.0" ]
4
2015-01-25T03:06:51.000Z
2021-11-08T13:02:39.000Z
pegasus/service/tests/test_users.py
nondejus/pegasus-service
01d421523d375153f1aae631718580572e78f1ec
[ "Apache-2.0" ]
null
null
null
pegasus/service/tests/test_users.py
nondejus/pegasus-service
01d421523d375153f1aae631718580572e78f1ec
[ "Apache-2.0" ]
3
2018-02-05T17:24:15.000Z
2020-02-05T17:25:53.000Z
# Unit tests for the pegasus.service.users module: password validation and
# hashing, the User model, and the DB/API layers around user management.
from pegasus.service import db, tests, users
from pegasus.service.users import User
from sqlalchemy.exc import IntegrityError


class TestUsers(tests.TestCase):
    # Pure-logic tests (no database).

    def test_validate_password(self):
        # Rejected: None, empty, too short, and a non-string object.
        self.assertRaises(users.InvalidPassword, users.validate_password, None)
        self.assertRaises(users.InvalidPassword, users.validate_password, "")
        self.assertRaises(users.InvalidPassword, users.validate_password, "abc")
        self.assertRaises(users.InvalidPassword, users.validate_password, self)
        # Accepted: 4+ character strings.
        users.validate_password("abcd")
        users.validate_password("secret")

    def test_hash_password(self):
        # Hashes have a fixed 87-character encoding regardless of input length.
        # NOTE(review): 87 chars looks like a pbkdf2/bcrypt-style encoding —
        # confirm against users.hash_password's implementation.
        shorthash = users.hash_password("abcd")
        self.assertEquals(len(shorthash), 87)
        secrethash = users.hash_password("secret")
        self.assertEquals(len(secrethash), 87)

    def test_verify_password(self):
        # A freshly hashed password verifies against its plaintext.
        self.assertTrue(users.verify_password("secret", users.hash_password("secret")))
        self.assertTrue(users.verify_password("abcd", users.hash_password("abcd")))

    def test_userpass(self):
        u = User(username="gideon", password="secret", email="gideon@isi.edu")
        # Make sure the correct password matches
        self.assertTrue(u.password_matches("secret"))
        # Make sure an incorrect password does not match
        self.assertFalse(u.password_matches("secrets"))
        self.assertFalse(u.password_matches(""))
        self.assertFalse(u.password_matches(None))


class TestUsersDB(tests.DBTestCase):
    # Tests that exercise the users module against a real (test) database.

    def test_usercreate(self):
        # Make sure we can insert a new user
        u1 = users.create(username="gideon", password="secret", email="gideon@isi.edu")
        # Make sure one user exists
        self.assertEquals(User.query.count(), 1)
        # Make sure the user matches what we specified
        u2 = User.query.first()
        self.assertEquals(u1.username, u2.username)
        self.assertEquals(u1.hashpass, u2.hashpass)
        self.assertEquals(u1.email, u2.email)

    def test_userdupes(self):
        # Creating a second user with the same username must fail.
        users.create(username="gideon", password="secret", email="gideon@isi.edu")
        self.assertRaises(users.UserExists, users.create,
                          "gideon", "private", "juve@usc.edu")

    def test_passwd(self):
        # Changing the password is visible both on the live object and on a
        # freshly fetched copy.
        gideon = users.create("gideon", "secret", "gideon@isi.edu")
        self.assertTrue(gideon.password_matches("secret"))  # original passwd
        users.passwd("gideon", "newsecret")
        self.assertTrue(gideon.password_matches("newsecret"))  # new passwd
        gideon2 = users.getuser("gideon")
        self.assertTrue(gideon2.password_matches("newsecret"))  # new passwd

    def test_usermod(self):
        # usermod updates the e-mail address in place.
        gideon = users.create("gideon", "secret", "gideon@isi.edu")
        self.assertEquals(gideon.email, "gideon@isi.edu")  # original email
        users.usermod("gideon", "juve@usc.edu")
        self.assertEquals(gideon.email, "juve@usc.edu")  # new email

    def test_all(self):
        # users.all() tracks the number of created users.
        l = users.all()
        self.assertEquals(len(l), 0)  # should not be any users
        users.create("gideon", "secret", "gideon@isi.edu")
        l = users.all()
        self.assertEquals(len(l), 1)  # should be 1 user
        users.create("rynge", "secret", "rynge@isi.edu")
        l = users.all()
        self.assertEquals(len(l), 2)  # should be 2 users

    def test_getuser(self):
        # getuser returns a record equal to the created one, and raises for
        # unknown usernames.
        gideon = users.create("gideon", "secret", "gideon@isi.edu")
        g2 = users.getuser("gideon")
        self.assertEquals(gideon.username, g2.username)
        self.assertEquals(gideon.hashpass, g2.hashpass)
        self.assertEquals(gideon.email, g2.email)
        self.assertRaises(users.NoSuchUser, users.getuser, "rynge")


class TestAuthentication(tests.APITestCase):
    # HTTP-level authentication checks against the service root.

    def test_unauthorized(self):
        r = self.get("/", auth=False)
        self.assertEquals(r.status_code, 401)

    def test_authorized(self):
        r = self.get("/")
        self.assertEquals(r.status_code, 200)
37.711538
94
0.669811
468
3,922
5.536325
0.224359
0.098804
0.037051
0.055577
0.42802
0.260903
0.260903
0.196063
0.148977
0.104593
0
0.00956
0.199898
3,922
103
95
38.07767
0.816125
0.079806
0
0.084507
0
0
0.108236
0
0
0
0
0
0.43662
1
0.169014
false
0.394366
0.042254
0
0.253521
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
1
0
0
0
0
0
2
030c3ebc796e19ffbe883761b81fbc245402b876
454
py
Python
app/views/redirects.py
davimiku/squawker
968de5b82ddc1d610af248a70840533c9d7b5cb9
[ "MIT" ]
null
null
null
app/views/redirects.py
davimiku/squawker
968de5b82ddc1d610af248a70840533c9d7b5cb9
[ "MIT" ]
null
null
null
app/views/redirects.py
davimiku/squawker
968de5b82ddc1d610af248a70840533c9d7b5cb9
[ "MIT" ]
null
null
null
from app import app
from flask import request, redirect, url_for, session, flash, render_template


@app.route('/save_to_local_storage')
def save_to_local_storage():
    """Render a page that stashes auth details into browser local storage.

    Query parameters (all optional, defaulting to ""): ``access_token``,
    ``user_id`` and ``redirect`` (where the page sends the user next).
    """
    args = request.args
    return render_template(
        'save_to_local_storage.html',
        access_token=args.get('access_token', ''),
        user_id=args.get('user_id', ''),
        redirect=args.get('redirect', ''),
    )
45.4
128
0.764317
65
454
5
0.415385
0.135385
0.101538
0.166154
0
0
0
0
0
0
0
0
0.112335
454
9
129
50.444444
0.806452
0
0
0
0
0
0.165198
0.105727
0
0
0
0
0
1
0.125
false
0
0.25
0
0.5
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
030d2e861057358b8f949ec5e021745ea215adaf
572
py
Python
deem/pytorch/losses/softmax_crossentropy_loss.py
xxaxtt/TwoTowers
206c6b38a2f72486906d391c5176e4508036aac0
[ "Apache-2.0" ]
14
2021-09-22T02:24:16.000Z
2021-12-11T11:59:02.000Z
deem/pytorch/losses/softmax_crossentropy_loss.py
xxaxtt/TwoTowers
206c6b38a2f72486906d391c5176e4508036aac0
[ "Apache-2.0" ]
2
2021-10-16T04:39:21.000Z
2021-12-01T08:04:46.000Z
deem/pytorch/losses/softmax_crossentropy_loss.py
xxaxtt/TwoTowers
206c6b38a2f72486906d391c5176e4508036aac0
[ "Apache-2.0" ]
5
2021-10-09T11:47:53.000Z
2021-11-25T04:41:24.000Z
import torch.nn as nn
import torch
import torch.nn.functional as F
import numpy as np


class SoftmaxCrossEntropyLoss(nn.Module):
    """Cross-entropy over softmax scores where column 0 is the positive item.

    ``y_pred`` is a (batch, 1 + num_negs) score matrix whose first column
    scores the positive instance; the loss is the mean negative log of that
    column's softmax probability.  ``y_true`` is accepted for interface
    compatibility but is not used.
    """

    def __init__(self):
        super(SoftmaxCrossEntropyLoss, self).__init__()

    def forward(self, y_pred, y_true):
        """Return the scalar mean NLL of column 0 under a row-wise softmax."""
        positive_prob = F.softmax(y_pred, dim=1)[:, 0]
        return -torch.log(positive_prob).mean()
26
66
0.61014
73
572
4.561644
0.561644
0.099099
0.078078
0
0
0
0
0
0
0
0
0.00489
0.284965
572
22
67
26
0.809291
0.197552
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
false
0
0.333333
0
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
032193e21f08743c1d6d5e8d4a24817df6f924e3
313
py
Python
__init__.py
isikhar/xia2
b81a98ab5debfa10f0fceb413ee7701f44d21470
[ "BSD-3-Clause" ]
null
null
null
__init__.py
isikhar/xia2
b81a98ab5debfa10f0fceb413ee7701f44d21470
[ "BSD-3-Clause" ]
1
2016-09-19T13:53:01.000Z
2016-10-21T09:42:11.000Z
__init__.py
isikhar/xia2
b81a98ab5debfa10f0fceb413ee7701f44d21470
[ "BSD-3-Clause" ]
1
2015-07-21T07:35:19.000Z
2015-07-21T07:35:19.000Z
import sys
import warnings

_PY2_MESSAGE = (
    "Python 2 is no longer supported. Please consider using the DIALS 2.2 release branch. "
    "For more information on Python 2.7 support please go to https://github.com/dials/dials/issues/1175."
)

# Warn once at import time when running under the unsupported Python 2;
# Python 3 interpreters proceed silently.
if sys.version_info[0] == 2:
    warnings.warn(_PY2_MESSAGE, UserWarning)
31.3
110
0.690096
46
313
4.673913
0.76087
0.065116
0
0
0
0
0
0
0
0
0
0.041152
0.223642
313
9
111
34.777778
0.843621
0
0
0
0
0.125
0.587859
0
0
0
0
0
0
1
0
true
0
0.25
0
0.25
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
2
03298c9e31460f50f8ed26e86ca88d4019fb2f61
426
py
Python
py-data/backend-interpreter/problems/api-related/1/correct-usages/RethinkInterface.py
ualberta-smr/NFBugs
65d9ef603e9527b3d83f53af0606b1ae240513f1
[ "MIT" ]
3
2019-10-01T19:58:24.000Z
2021-09-17T04:03:21.000Z
py-data/backend-interpreter/problems/api-related/1/correct-usages/RethinkInterface.py
senseconcordia/NFBugsExtended
60058ccbd64107018a92ede73056d08ecbdaaed2
[ "MIT" ]
22
2018-08-23T15:15:37.000Z
2019-03-15T17:09:41.000Z
py-data/backend-interpreter/problems/api-related/1/correct-usages/RethinkInterface.py
senseconcordia/NFBugsExtended
60058ccbd64107018a92ede73056d08ecbdaaed2
[ "MIT" ]
1
2019-02-11T18:26:36.000Z
2019-02-11T18:26:36.000Z
from multiprocessing import Queue
from os import environ
from queue import Empty
from sys import exit as sysexit
from time import sleep, time

import rethinkdb


class RethinkInterface:
    """Partial extract of the Rethink interface; only the error path shown."""

    def pattern(self, output_data):
        """Report a missing-job error for the id in ``output_data[0]``.

        Sends a [process-name, message, level, timestamp] record through
        ``self.logger`` (level 30 corresponds to logging.WARNING).
        """
        # ...
        message = "".join(("There is no job with an id of ", output_data[0]))
        self.logger.send(["dbprocess", message, 30, time()])
21.3
76
0.577465
50
426
4.88
0.7
0.081967
0
0
0
0
0
0
0
0
0
0.010676
0.340376
426
19
77
22.421053
0.857651
0.007042
0
0
0
0
0.092637
0
0
0
0
0
0
1
0.076923
false
0
0.461538
0
0.615385
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
033b27fdf130086ab561204693312d10e86276b0
1,789
py
Python
poco/apps/web/adminboard/urls.py
sunliwen/poco
a4b8c4ede63711eea42a444fb9d922c350855364
[ "MIT" ]
null
null
null
poco/apps/web/adminboard/urls.py
sunliwen/poco
a4b8c4ede63711eea42a444fb9d922c350855364
[ "MIT" ]
7
2019-03-22T06:26:39.000Z
2021-06-10T19:36:06.000Z
poco/apps/web/adminboard/urls.py
sunliwen/poco
a4b8c4ede63711eea42a444fb9d922c350855364
[ "MIT" ]
1
2017-10-25T03:43:51.000Z
2017-10-25T03:43:51.000Z
from django.conf.urls import patterns, include, url

# Admin-board URL table (old-style dotted-string views via ``patterns``).
# Django-template examples retained from the original for reference:
#   url(r'^$', 'Dashboard.views.home', name='home'),
#   url(r'^Dashboard/', include('Dashboard.foo.urls')),
#   url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
#   url(r'^admin/', include(admin.site.urls)),
#   url(r'^add_site$', 'apps.web.adminboard.views.add_site', name='add_site'),

urlpatterns = patterns(
    '',
    url(r'^$', 'apps.web.adminboard.views.index', name='index'),
    # AJAX endpoints.
    url(r'^ajax/calcAsap$', 'apps.web.adminboard.views.ajax_calc_asap', name='ajax_calc_asap'),
    url(r'^ajax/loadData$', 'apps.web.adminboard.views.ajax_load_data', name='ajax_load_data'),
    url(r'^ajax/loadSiteCheckingDetails$', 'apps.web.adminboard.views.ajax_load_site_checking_details', name='ajax_load_site_checking_details'),
    # Site and user management.
    url(r'^edit_site$', 'apps.web.adminboard.views.edit_site', name='edit_site'),
    url(r'^add_user$', 'apps.web.adminboard.views.add_user', name='add_user'),
    url(r'^edit_user$', 'apps.web.adminboard.views.edit_user', name='edit_user'),
    url(r'^user_list$', 'apps.web.adminboard.views.user_list', name='user_list'),
    url(r'^site_checking_details$', 'apps.web.adminboard.views.site_checking_details', name='site_checking_details'),
    # Authentication.
    url(r'^login$', 'apps.web.adminboard.views.login', name='login'),
    url(r'^logout$', 'apps.web.adminboard.views.logout', name='logout'),
)
63.892857
125
0.594746
218
1,789
4.711009
0.224771
0.062317
0.198637
0.257059
0.269718
0.058423
0
0
0
0
0
0
0.238122
1,789
27
126
66.259259
0.753485
0.235327
0
0
0
0
0.508462
0.384106
0
0
0
0
0
1
0
false
0
0.066667
0
0.066667
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
034827f1740a5c334b57668abe022dcf40cee88f
1,488
py
Python
petsi/plugins/tokencounter/__init__.py
vadaszd/petsi
9db924d7214f28d1fe47c83f760f90becf9d1577
[ "MIT" ]
null
null
null
petsi/plugins/tokencounter/__init__.py
vadaszd/petsi
9db924d7214f28d1fe47c83f760f90becf9d1577
[ "MIT" ]
null
null
null
petsi/plugins/tokencounter/__init__.py
vadaszd/petsi
9db924d7214f28d1fe47c83f760f90becf9d1577
[ "MIT" ]
null
null
null
""" A plugin that collects by-place token-count stats. .. rubric:: Public package interface - Class :class:`TokenCounterPlugin` (see below) .. rubric:: Internal submodules .. autosummary:: :template: module_reference.rst :recursive: :toctree: petsi.plugins.tokencounter._tokencounter """ from dataclasses import dataclass from typing import Optional, TYPE_CHECKING from ...util import export from ..meters import MeterPlugin from ._tokencounter import TokenCounterCollector, TokenCounterPluginPlaceObserver if TYPE_CHECKING: from ..interface import NoopTokenObserver, NoopTransitionObserver from ..._structure import Place @export @dataclass(eq=False) class TokenCounterPlugin( MeterPlugin["TokenCounterCollector", "TokenCounterPluginPlaceObserver", "NoopTransitionObserver", "NoopTokenObserver"]): """ A PetSi plugin providing by-place token-count stats The plugin collects the empirical distribution of the time-weighted token counts at all places of the observed Petri net, i.e. in what percentage of time the token count is i at place j. """ def __post_init__(self): self._collector = TokenCounterCollector(self._n) def place_observer_factory(self, p: "Place") -> Optional[TokenCounterPluginPlaceObserver]: return TokenCounterPluginPlaceObserver(self, p, self._clock, self._collector) \ if self._places is None or p.ordinal in self._places else None
31
94
0.737231
161
1,488
6.695652
0.52795
0.027829
0.022263
0.03154
0.040816
0
0
0
0
0
0
0
0.18414
1,488
47
95
31.659574
0.887974
0.362903
0
0
0
0
0.106077
0.081768
0
0
0
0
0
1
0.111111
false
0
0.388889
0.055556
0.611111
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
0349bd015d8a2bcfd987f366bd74fb6da58356b7
123
py
Python
civet/extraction/side.py
FNNDSC/pycivet
440839d8dcb3413ec96d06945498a6a9ec710204
[ "MIT" ]
null
null
null
civet/extraction/side.py
FNNDSC/pycivet
440839d8dcb3413ec96d06945498a6a9ec710204
[ "MIT" ]
null
null
null
civet/extraction/side.py
FNNDSC/pycivet
440839d8dcb3413ec96d06945498a6a9ec710204
[ "MIT" ]
null
null
null
from enum import Enum


class Side(Enum):
    """Brain hemisphere side (left or right)."""

    # Lowercase string values identify the hemisphere.
    LEFT = 'left'
    RIGHT = 'right'
12.3
26
0.552846
14
123
4.857143
0.642857
0
0
0
0
0
0
0
0
0
0
0
0.317073
123
9
27
13.666667
0.809524
0.178862
0
0
0
0
0.105882
0
0
0
0
0
0
1
0
false
0
0.25
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
034bd33e8c9726f9847b39e705d892ce318b7e79
1,251
py
Python
pygsti/_version.py
colibri-coruscans/pyGSTi
da54f4abf668a28476030528f81afa46a1fbba33
[ "Apache-2.0" ]
1
2021-12-19T15:11:09.000Z
2021-12-19T15:11:09.000Z
pygsti/_version.py
colibri-coruscans/pyGSTi
da54f4abf668a28476030528f81afa46a1fbba33
[ "Apache-2.0" ]
null
null
null
pygsti/_version.py
colibri-coruscans/pyGSTi
da54f4abf668a28476030528f81afa46a1fbba33
[ "Apache-2.0" ]
null
null
null
#*************************************************************************************************** # Copyright 2015, 2019 National Technology & Engineering Solutions of Sandia, LLC (NTESS). # Under the terms of Contract DE-NA0003525 with NTESS, the U.S. Government retains certain rights # in this software. # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except # in compliance with the License. You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 or in the LICENSE file in the root pyGSTi directory. #*************************************************************************************************** """ Dynamic package versioning using setuptools_scm """ __version__ = "unknown" try: from pkg_resources import get_distribution, DistributionNotFound __version__ = get_distribution('pygsti').version except DistributionNotFound: # package not installed try: from inspect import getfile, currentframe this_file = getfile(currentframe()) from os.path import abspath from setuptools_scm import get_version __version__ = get_version(root='../..', relative_to=abspath(this_file)) except Exception: pass
46.333333
100
0.618705
138
1,251
5.449275
0.565217
0.053191
0.034574
0.042553
0
0
0
0
0
0
0
0.018061
0.159073
1,251
26
101
48.115385
0.696768
0.589928
0
0.153846
0
0
0.036145
0
0
0
0
0
0
1
0
false
0.076923
0.307692
0
0.307692
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
1
0
0
0
0
2
035e45c7033dc16603856320a879125084d43dcb
3,181
py
Python
whereami/migrations/0001_initial.py
martilidad/whereami_server
35455632cae43dbce2a8be148c57c66946b0f413
[ "MIT" ]
1
2020-05-13T11:46:29.000Z
2020-05-13T11:46:29.000Z
whereami/migrations/0001_initial.py
martilidad/whereami_server
35455632cae43dbce2a8be148c57c66946b0f413
[ "MIT" ]
40
2020-05-11T20:01:59.000Z
2022-03-19T21:58:32.000Z
whereami/migrations/0001_initial.py
martilidad/whereami_server
35455632cae43dbce2a8be148c57c66946b0f413
[ "MIT" ]
null
null
null
# Generated by Django 3.0.6 on 2020-05-07 11:56
# NOTE(review): auto-generated migration — only comments were added here;
# the operations themselves must not be edited by hand.

import datetime
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        # Depend on whichever user model the project has configured.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # Challenge: a timed round (its Game FK is added further below).
        migrations.CreateModel(
            name='Challenge',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('time', models.IntegerField()),
                ('pub_date', models.DateTimeField(blank=True, default=datetime.datetime.now)),
            ],
        ),
        # Location: a named lat/long coordinate.
        migrations.CreateModel(
            name='Location',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=32)),
                ('lat', models.FloatField()),
                ('long', models.FloatField()),
                ('pub_date', models.DateTimeField(blank=True, default=datetime.datetime.now)),
            ],
        ),
        # Game: a uniquely named set of Locations.
        migrations.CreateModel(
            name='Game',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=32, unique=True)),
                ('locations', models.ManyToManyField(to='whereami.Location')),
            ],
        ),
        # ChallengeLocation: join table pairing a Challenge with a Location.
        migrations.CreateModel(
            name='ChallengeLocation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('challenge', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='whereami.Challenge')),
                ('location', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='whereami.Location')),
            ],
        ),
        # Link each Challenge back to its Game.
        migrations.AddField(
            model_name='challenge',
            name='game',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='whereami.Game'),
        ),
        # Guess: a user's answer for one challenge location, with its score;
        # each user may guess each challenge location at most once.
        migrations.CreateModel(
            name='Guess',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('lat', models.FloatField()),
                ('long', models.FloatField()),
                ('score', models.IntegerField()),
                ('distance', models.IntegerField()),
                ('pub_date', models.DateTimeField(blank=True, default=datetime.datetime.now)),
                ('challenge_location', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='whereami.ChallengeLocation')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'unique_together': {('user', 'challenge_location')},
            },
        ),
    ]
42.986486
137
0.564602
292
3,181
6.034247
0.267123
0.031782
0.047673
0.074915
0.648695
0.648695
0.604427
0.604427
0.604427
0.604427
0
0.008513
0.298334
3,181
73
138
43.575342
0.780914
0.014146
0
0.5
1
0
0.102908
0.008494
0
0
0
0
0
1
0
false
0
0.060606
0
0.121212
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
0366f057aa58cacf483a76a18694bdc207d351b2
2,464
py
Python
machineLearning/KNN/knn.py
arpita221b/Hacktoberfest-2k19-1
6f682ea2226a8ce6f5a913da9ecdafff7a9fa5bd
[ "MIT" ]
1
2021-05-27T06:14:32.000Z
2021-05-27T06:14:32.000Z
machineLearning/KNN/knn.py
arpita221b/Hacktoberfest-2k19-1
6f682ea2226a8ce6f5a913da9ecdafff7a9fa5bd
[ "MIT" ]
null
null
null
machineLearning/KNN/knn.py
arpita221b/Hacktoberfest-2k19-1
6f682ea2226a8ce6f5a913da9ecdafff7a9fa5bd
[ "MIT" ]
null
null
null
""" A knn classifier predicts which class a point p should belong to. To do this we loop through all the points and find the distance of p from each point. We then sort these indices ie., we get an array with the sorted indices(find_nearest_neighbours). We then select the first k of these indices and find to which class do majority of the k points lie using the majority votes function. knn_predict returns this class """ import numpy as np def distance(p1,p2): """ Returns the eucledian distance between two points p1 and p2. """ #TODO def majority_votes(votes): """ Votes is a list. Returns the vote (in votes) with maximum counts and a random maximum in case of a tie. Constructs the count of votes as a dictionary with keys as the votes and values as the counts of the votes. You may use library functions like mode in scipy.stats.mstats or write your own code. """ #TODO def find_nearest_neighbours(p, points, k=5): """) Find k nearest neighbours of p and return their indices p : Point to classify points : List of predictors """ #Finding nearest neighbours- pseudocode #loop over all the points # find the distance between p and every other point # sort the distances and return thek nearest points #TODO #classes to which the k points belong is given by outcomes def knn_predict(p, points, outcomes, k=5): """ Use find_nearest_neighbors and majority_votes to predict the class to which the class p belongs p : Point to classify points : List of predictors outcomes : List Containing the possible outcomes/targets. """ #TODO ### kNN on the iris dataset ### ## Load the IRIS dataset. 
from sklearn import datasets iris = datasets.load_iris() predictors = iris.data outcomes = iris.target ## Using sklearn's KNN Classifier ## from sklearn.neighbors import KNeighborsClassifier knn = KNeighborsClassifier(n_neighbors = 5) knn.fit(predictors, outcomes) sk_predictions = knn.predict(predictors) ## Using the classifier we built my_predictions = np.array([knn_predict(p, predictors, outcomes, k=5) for p in predictors]) # Test and compare our model np.mean(my_predictions == sk_predictions) np.mean(my_predictions == outcomes) np.mean(sk_predictions == outcomes)
22.605505
125
0.681006
353
2,464
4.699717
0.368272
0.040989
0.014467
0.019289
0.045811
0.045811
0.045811
0.045811
0
0
0
0.004376
0.258117
2,464
109
126
22.605505
0.903173
0.59375
0
0
0
0
0
0
0
0
0
0.036697
0
1
0.235294
false
0
0.176471
0
0.411765
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
0
0
0
0
0
0
0
2
0370d568390c49c03bb1c89371762fa802ccdfe1
1,083
py
Python
pybrain/optimization/distributionbased/distributionbased.py
bayerj/pybrain
cfef28152bd60cedfdae5390c599d4fe4d2ec095
[ "BSD-3-Clause" ]
2
2016-01-28T21:46:38.000Z
2018-07-12T04:21:33.000Z
pybrain/optimization/distributionbased/distributionbased.py
bayerj/pybrain
cfef28152bd60cedfdae5390c599d4fe4d2ec095
[ "BSD-3-Clause" ]
null
null
null
pybrain/optimization/distributionbased/distributionbased.py
bayerj/pybrain
cfef28152bd60cedfdae5390c599d4fe4d2ec095
[ "BSD-3-Clause" ]
null
null
null
__author__ = 'Tom Schaul, tom@idsia.ch'

from pybrain.optimization.optimizer import ContinuousOptimizer


class DistributionBasedOptimizer(ContinuousOptimizer):
    """ The parent class for all optimization algorithms that are based on
    iteratively updating a search distribution. Provides a number of
    potentially useful methods that could be used by subclasses. """

    # Process samples one at a time (True) or per-batch (False).
    online = False
    # Number of samples drawn per iteration in batch mode.
    batchSize = 100

    # distribution types
    GAUSSIAN = 1
    CAUCHY = 2
    GENERALIZEDGAUSSIAN = 3
    STUDENTT = 4

    # Which of the distribution-type constants above is in use.
    distributionType = GAUSSIAN

    def _updateDistribution(self, dparamDeltas):
        """ Update the parameters of the current distribution, directly.

        Intentionally empty: subclasses are expected to override this. """

    def _generateSample(self):
        """ Generate 1 sample from the current distribution.

        Intentionally empty: subclasses are expected to override this. """

    def _generateConformingBatch(self):
        """ Generate a batch of samples that conforms to the current
        distribution. If importance mixing is enabled, this can reuse old
        samples.

        Intentionally empty: subclasses are expected to override this. """
30.942857
90
0.644506
106
1,083
6.518868
0.726415
0.043415
0.095514
0
0
0
0
0
0
0
0
0.01054
0.299169
1,083
35
91
30.942857
0.899868
0.425669
0
0
0
0
0.044118
0
0
0
0
0
0
1
0.230769
false
0
0.076923
0
0.923077
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
03775b6ccf07b45d15b781d847fdb3d9cc0ffe90
1,793
py
Python
rdfframes/query_buffer/query_operators/shared/integer_count_node.py
qcri/RDFframe
2a50105479051c134cc5eddc9e20d55b755ef765
[ "MIT" ]
13
2019-07-06T00:10:11.000Z
2022-02-20T02:14:16.000Z
rdfframes/query_buffer/query_operators/shared/integer_count_node.py
qcri/RDFrame
2a50105479051c134cc5eddc9e20d55b755ef765
[ "MIT" ]
1
2019-05-20T08:51:42.000Z
2019-05-20T08:51:42.000Z
rdfframes/query_buffer/query_operators/shared/integer_count_node.py
qcri/RDFframe
2a50105479051c134cc5eddc9e20d55b755ef765
[ "MIT" ]
3
2020-04-17T10:50:37.000Z
2022-03-23T01:30:16.000Z
from rdfframes.query_buffer.query_operators.query_queue_operator import QueryQueueOperator
from rdfframes.utils.constants import AggregationFunction

__author__ = """
Abdurrahman Ghanem <abghanem@hbku.edu.qa>
Aisha Mohamed <ahmohamed@qf.org.qa>
"""


class IntegerCountOperator(QueryQueueOperator):
    """Query operator representing a COUNT over all rows of a dataset."""

    def __init__(self, src_dataset_name, agg_tag=None, agg_param=None):
        """
        Represents an aggregation function on a normal dataset like SUM, AVG ... etc
        :param src_dataset_name: the dataset name where the source column resides
        :param agg_tag: aggregation function alias
        :param agg_param: aggregation parameter like distinct with count
        """
        super(IntegerCountOperator, self).__init__(src_dataset_name)
        self.function = AggregationFunction.COUNT
        # Bug fix: the original fallback referenced undefined names
        # (src_col_name, agg_fn), raising NameError whenever agg_tag was
        # omitted. Build the default alias from what this operator knows:
        # the source dataset name and the aggregation function.
        self.agg_tag = agg_tag if agg_tag is not None \
            else '{}_{}'.format(src_dataset_name, self.function)
        self.agg_parameter = agg_param
        self._id = self.create_id()

    def create_id(self):
        """Unique operator id of the form '<function>.all'."""
        return '{}.all'.format(self.function)

    def parent_ids(self):
        # COUNT(*) has no source column, hence no parent node.
        raise Exception("COUNT ALL column has not parent")

    def node_adds_col(self):
        """COUNT over all rows adds no new column to the dataset."""
        return False

    def operation_name(self):
        return self.function

    def visit_node(self, query_model, ds, parent):
        """Register the COUNT(*) aggregation and its alias on the query model."""
        query_model.add_aggregate_pair("*", self.function, self.agg_tag, self.agg_parameter)
        query_model.auto_add_select_column(self.agg_tag)
        return ds, query_model, None

    def __repr__(self):
        return '''Aggregation Node:
            src_ds: {}
            src_col: all
            agg_function: {}
            agg_parameters: {}
            agg_tag: {}'''.format(self.src_dataset_name, self.function, self.agg_parameter, self.agg_tag)
36.591837
95
0.675962
223
1,793
5.130045
0.394619
0.047203
0.048951
0.031469
0.045455
0
0
0
0
0
0
0
0.234802
1,793
48
96
37.354167
0.833819
0.143893
0
0
0
0
0.182246
0.028917
0
0
0
0
0
1
0.212121
false
0
0.060606
0.121212
0.454545
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
037b5142e2b0b7f3e916efd4ce41e1969de08fad
2,348
py
Python
support/prepend_copyright.py
electronicvisions/nux
a17807e63787124f2f64e3b4d1a2f3ed25c6c3bc
[ "Apache-2.0" ]
7
2015-10-13T10:38:44.000Z
2020-07-02T02:26:14.000Z
support/prepend_copyright.py
electronicvisions/nux
a17807e63787124f2f64e3b4d1a2f3ed25c6c3bc
[ "Apache-2.0" ]
1
2018-01-04T14:21:20.000Z
2018-01-04T14:29:17.000Z
support/prepend_copyright.py
electronicvisions/nux
a17807e63787124f2f64e3b4d1a2f3ed25c6c3bc
[ "Apache-2.0" ]
3
2016-03-05T19:13:41.000Z
2018-01-03T11:14:50.000Z
#!/usr/bin/env python
# Copyright 2015 Heidelberg University Copyright and related rights are
# licensed under the Solderpad Hardware License, Version 0.51 (the "License");
# you may not use this file except in compliance with the License. You may obtain
# a copy of the License at http://solderpad.org/licenses/SHL-0.51. Unless
# required by applicable law or agreed to in writing, software, hardware and
# materials distributed under this License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
# the License for the specific language governing permissions and limitations
# under the License.

import re
import sys

# Notice template; each '%s' is replaced with the target file's line-comment
# leader (e.g. '//' or '#').
notice_template = """%s Copyright 2015 Heidelberg University Copyright and related rights are
%s licensed under the Solderpad Hardware License, Version 0.51 (the "License");
%s you may not use this file except in compliance with the License. You may obtain
%s a copy of the License at http://solderpad.org/licenses/SHL-0.51. Unless
%s required by applicable law or agreed to in writing, software, hardware and
%s materials distributed under this License is distributed on an "AS IS" BASIS,
%s WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
%s the License for the specific language governing permissions and limitations
%s under the License.
"""

# Matches a leading '/* _use_m4_ ... */' block comment (the "M4 magic string")
# which must stay first in the file.
use_m4_pattern = re.compile(r'/\* _use_m4_([^*]|[\r\n]|(\*+([^*/]|[\r\n])))*\*+/')
# Detects a file that already carries the notice (as '//' comments).
notice_pattern = re.compile(r'// Copyright 2015 Heidelberg(.*)under the License\.', re.DOTALL)


def prepend_copyright(filename, comment):
    """Prepend the license notice to `filename` using `comment` as the
    line-comment leader. Keeps any leading M4 magic-string block first.
    Returns True if the file was modified, False if it already had the notice.
    """
    notice = notice_template % ((comment,) * 9)
    with open(filename, 'r') as f:
        orig = f.read()

    if notice_pattern.search(orig):
        # Notice already present -- leave the file untouched.
        return False

    with open(filename, 'w') as f2:
        found_m4 = use_m4_pattern.search(orig)
        if found_m4:
            # Keep the M4 block at the very top, then the notice, then the
            # rest of the file with the M4 block removed from its old spot.
            f2.write(found_m4.group(0))
            f2.write('\n\n' + notice + '\n\n')
            f2.write(use_m4_pattern.sub('', orig))
            # Bug fix: Python-2-only `print` statements replaced with
            # py2/py3-compatible calls.
            print("Prepended '%s' with copyright notice using M4 magic string." % (filename,))
        else:
            f2.write(notice + '\n\n' + orig)
            print("Prepended '%s' with copyright notice." % (filename,))
    return True


if __name__ == '__main__':
    prepend_copyright(sys.argv[1], sys.argv[2])
46.039216
96
0.691227
343
2,348
4.682216
0.309038
0.068493
0.091532
0.104608
0.724159
0.724159
0.699253
0.65193
0.612702
0.536737
0
0.022982
0.203152
2,348
50
97
46.96
0.835382
0.289608
0
0
0
0.033333
0.519928
0.027778
0
0
0
0
0
0
null
null
0
0.066667
null
null
0.066667
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
cee779415f91f7a20b8a7d5f817c4ccf49d4847f
491
py
Python
commands.py
anujaagarwal/Family-Tree-In-Python
46742cdb3dabfa2cc190f4f2e32a51b09f1897c9
[ "MIT" ]
null
null
null
commands.py
anujaagarwal/Family-Tree-In-Python
46742cdb3dabfa2cc190f4f2e32a51b09f1897c9
[ "MIT" ]
null
null
null
commands.py
anujaagarwal/Family-Tree-In-Python
46742cdb3dabfa2cc190f4f2e32a51b09f1897c9
[ "MIT" ]
null
null
null
# Testcases covering all information GET_RELATIONSHIP Arit Brother-In-Law GET_RELATIONSHIP Yodhan Maternal-Aunt GET_RELATIONSHIP Laki Maternal-Uncle GET_RELATIONSHIP Dritha SIBLINGS GET_RELATIONSHIP Ahit Paternal-Uncle GET_RELATIONSHIP Ahit Paternal-Aunt GET_RELATIONSHIP Atya Sister-In-Law GET_RELATIONSHIP Arit Son GET_RELATIONSHIP Lika Daughter GiT JHSH GET_RELATIONSHIP Lavnya Daughter ADD_CHILD Chitra Aria Female GET_RELATIONSHIP Lavnya Maternal-Aunt GET_RELATIONSHIP Aria Siblings
27.277778
37
0.8778
68
491
6.147059
0.470588
0.430622
0.136364
0.095694
0
0
0
0
0
0
0
0
0.101833
491
17
38
28.882353
0.947846
0.069246
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
2
cefccbc526941bdf4c71dd85a0cd2204e5c80d91
490
py
Python
config.py
Flowdeeps/flatearth
cb834a88f0ae691470932e8ae815bb087b9b279c
[ "MIT" ]
1
2016-06-14T07:53:53.000Z
2016-06-14T07:53:53.000Z
config.py
Flowdeeps/flatearth
cb834a88f0ae691470932e8ae815bb087b9b279c
[ "MIT" ]
null
null
null
config.py
Flowdeeps/flatearth
cb834a88f0ae691470932e8ae815bb087b9b279c
[ "MIT" ]
null
null
null
#!/usr/bin/env python

import configparser


def getconfig(input):
    """Read an INI file and flatten every section's options into one dict.

    :param input: path of the INI file to read.
    :return: dict mapping option name -> value (later sections overwrite
        earlier ones when option names collide). If an "output_string"
        option is present, its value is wrapped in an HTML comment.
    """
    config = configparser.ConfigParser()
    config.read(input)

    config_dict = {}
    for section in config:
        for var in config[section]:
            config_dict[var] = config[section][var]

    # Bug fix: the original indexed config_dict["output_string"] directly in
    # the guard, raising KeyError when the option was absent instead of
    # simply skipping the wrapping; .get() with an `is not None` test does
    # what the guard intended.
    if config_dict.get("output_string") is not None:
        config_dict["output_string"] = "<!-- " + config_dict["output_string"] + " -->"

    return config_dict
23.333333
90
0.608163
53
490
5.45283
0.415094
0.207612
0.16609
0.228374
0
0
0
0
0
0
0
0
0.257143
490
20
91
24.5
0.793956
0.040816
0
0.153846
0
0
0.102345
0
0
0
0
0
0
1
0.076923
false
0
0.076923
0
0.230769
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
3009e7be1a03d369e01448072d0f99605ee02fa6
238
py
Python
soane/__init__.py
spheten/soane
b5517275b8b3fd3b2b5a19b031c98cfd45d42292
[ "BSD-3-Clause" ]
1
2021-10-03T07:13:55.000Z
2021-10-03T07:13:55.000Z
soane/__init__.py
spheten/soane
b5517275b8b3fd3b2b5a19b031c98cfd45d42292
[ "BSD-3-Clause" ]
14
2021-10-03T07:10:10.000Z
2021-10-06T09:07:41.000Z
soane/__init__.py
spheten/soane
b5517275b8b3fd3b2b5a19b031c98cfd45d42292
[ "BSD-3-Clause" ]
null
null
null
'''
Package definition for 'soane'.
'''

# Release metadata; VERSION_STRING is the human-readable banner built
# from the number and date.
VERSION_DATE = '2021-10-03'
VERSION_NUMBER = '0.1.0'
VERSION_STRING = f'Soane version {VERSION_NUMBER} ({VERSION_DATE}).'

# Expose the package's submodules at package level.
from soane import comms
from soane import items
from soane import tools
19.833333
68
0.735294
35
238
4.857143
0.542857
0.158824
0.264706
0
0
0
0
0
0
0
0
0.053922
0.142857
238
11
69
21.636364
0.779412
0.130252
0
0
0
0
0.316583
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
2
300a1d81801bc773de7b77a492fd7008debdae07
8,511
py
Python
tomviz/python/tomviz/utils.py
utkarshayachit/tomviz
02efc26a14965602cced2343fe6d058c4a0ff903
[ "BSD-3-Clause" ]
null
null
null
tomviz/python/tomviz/utils.py
utkarshayachit/tomviz
02efc26a14965602cced2343fe6d058c4a0ff903
[ "BSD-3-Clause" ]
null
null
null
tomviz/python/tomviz/utils.py
utkarshayachit/tomviz
02efc26a14965602cced2343fe6d058c4a0ff903
[ "BSD-3-Clause" ]
null
null
null
import numpy as np
import vtk.numpy_interface.dataset_adapter as dsa
import vtk.util.numpy_support as np_s
import itk

# Dictionary going from VTK array type to ITK type
vtk_to_itk_types = {}


def setup_vtk_itk_map():
    """Try to set up mappings between VTK image types and ITK image types.
    Not all ITK image types may be available, hence the try statements."""
    try:
        vtk_to_itk_types['vtkUnsignedCharArray'] = itk.Image.UC3
    except:
        pass

    try:
        vtk_to_itk_types['vtkCharArray'] = itk.Image.SC3
    except:
        pass

    try:
        vtk_to_itk_types['vtkUnsignedShortArray'] = itk.Image.US3
    except:
        pass

    try:
        vtk_to_itk_types['vtkShortArray'] = itk.Image.SS3
    except:
        pass

    try:
        vtk_to_itk_types['vtkUnsignedIntArray'] = itk.Image.UI3
    except:
        pass

    try:
        vtk_to_itk_types['vtkIntArray'] = itk.Image.SI3
    except:
        pass

    try:
        # Bug fix: the key was misspelled 'vtFloatArray' so vtkFloatArray
        # scalars could never be mapped to an ITK float image type.
        vtk_to_itk_types['vtkFloatArray'] = itk.Image.F3
    except:
        pass

    try:
        vtk_to_itk_types['vtkDoubleArray'] = itk.Image.D3
    except:
        pass


# Set up the VTK-to-ITK image type mapping
setup_vtk_itk_map()


def get_itk_image_type(vtk_image_data):
    """Get an ITK image type corresponding to the provided vtkImageData
    object. Raises Exception when the scalar array's VTK class has no
    known ITK counterpart."""
    image_type = None

    # Get the scalars
    pd = vtk_image_data.GetPointData()
    scalars = pd.GetScalars()

    vtk_class_name = scalars.GetClassName()

    try:
        image_type = vtk_to_itk_types[vtk_class_name]
    except KeyError:
        raise Exception('No ITK type known for %s' % vtk_class_name)

    return image_type


def convert_vtk_to_itk_image(vtk_image_data):
    """Get an ITK image from the provided vtkImageData object.
    This image can be passed to ITK filters."""
    image_type = get_itk_image_type(vtk_image_data)

    # Save the VTKGlue optimization for later
    #------------------------------------------
    #itk_import = itk.VTKImageToImageFilter[image_type].New()
    #itk_import.SetInput(vtk_image_data)
    #itk_import.Update()
    #itk_image = itk_import.GetOutput()
    #itk_image.DisconnectPipeline()
    #------------------------------------------
    array = get_array(vtk_image_data)
    itk_converter = itk.PyBuffer[image_type]
    itk_image = itk_converter.GetImageFromArray(array)

    return itk_image


def add_vtk_array_from_itk_image(itk_image_data, vtk_image_data, name):
    """Add an array from an ITK image to a vtkImageData with a given name."""
    itk_output_image_type = type(itk_image_data)

    # Save the VTKGlue optimization for later
    #------------------------------------------
    # Export the ITK image to a VTK image. No copying should take place.
    #export_filter = itk.ImageToVTKImageFilter[itk_output_image_type].New()
    #export_filter.SetInput(itk_image_data)
    #export_filter.Update()
    # Get scalars from the temporary image and copy them to the data set
    #result_image = export_filter.GetOutput()
    #filter_array = result_image.GetPointData().GetArray(0)
    # Make a new instance of the array that will stick around after this
    # filters in this script are garbage collected
    #new_array = filter_array.NewInstance()
    #new_array.DeepCopy(filter_array)  # Should be able to shallow copy?
    #new_array.SetName(name)
    # Set a new point data array in the dataset
    #vtk_image_data.GetPointData().AddArray(new_array)
    #------------------------------------------
    result = itk.PyBuffer[itk_output_image_type].GetArrayFromImage(itk_image_data)
    # NOTE(review): the result is stored via set_label_map (array name
    # "LabelMap"), not under the `name` parameter -- confirm intended.
    set_label_map(vtk_image_data, result)


def get_scalars(dataobject):
    """Return the active point-data scalars as a wrapped VTK array."""
    do = dsa.WrapDataObject(dataobject)
    # get the first
    rawarray = do.PointData.GetScalars()
    vtkarray = dsa.vtkDataArrayToVTKArray(rawarray, do)
    vtkarray.Association = dsa.ArrayAssociation.POINT
    return vtkarray


def set_scalars(dataobject, newscalars):
    """Replace the active point-data scalars with `newscalars`, keeping the
    original array's name."""
    do = dsa.WrapDataObject(dataobject)
    oldscalars = do.PointData.GetScalars()
    name = oldscalars.GetName()
    del oldscalars

    # handle the case if the newscalars array has a type that
    # cannot be passed on to VTK. In which case, we convert to
    # float64
    vtk_typecode = np_s.get_vtk_array_type(newscalars.dtype)
    if vtk_typecode is None:
        newscalars = newscalars.astype(np.float64)
    do.PointData.append(newscalars, name)
    do.PointData.SetActiveScalars(name)


def get_array(dataobject):
    """Return the scalars reshaped to the dataset's 3D dimensions
    (Fortran order)."""
    scalars_array = get_scalars(dataobject)
    scalars_array3d = np.reshape(scalars_array, (dataobject.GetDimensions()),
                                 order='F')
    return scalars_array3d


def set_array(dataobject, newarray):
    """Replace the dataset's scalars with `newarray` and update its extents."""
    # Ensure we have Fortran ordered flat array to assign to image data. This
    # is ideally done without additional copies, but if C order we must copy.
    if np.isfortran(newarray):
        arr = newarray.reshape(-1, order='F')
    else:
        # Bug fix: Python-2-only `print` statements replaced with calls.
        print('Warning, array does not have Fortran order, making deep copy and fixing...')
        tmp = np.asfortranarray(newarray)
        arr = tmp.reshape(-1, order='F')
        print('...done.')

    # Set the extents (they may have changed).
    dataobject.SetExtent(0, newarray.shape[0] - 1,
                         0, newarray.shape[1] - 1,
                         0, newarray.shape[2] - 1)

    # Now replace the scalars array with the new array.
    vtkarray = np_s.numpy_to_vtk(arr)
    vtkarray.Association = dsa.ArrayAssociation.POINT
    do = dsa.WrapDataObject(dataobject)
    oldscalars = do.PointData.GetScalars()
    name = oldscalars.GetName()
    del oldscalars
    # NOTE(review): the flat numpy array `arr` (not `vtkarray`) is appended
    # here, as in the original -- confirm `vtkarray` is intentionally unused.
    do.PointData.append(arr, name)
    do.PointData.SetActiveScalars(name)


def set_label_map(dataobject, labelarray):
    """Attach `labelarray` to the dataset's point data as "LabelMap"."""
    # Ensure we have Fortran ordered flat array to assign to image data. This
    # is ideally done without additional copies, but if C order we must copy.
    if np.isfortran(labelarray):
        arr = labelarray.reshape(-1, order='F')
    else:
        print('Warning, array does not have Fortran order, making deep copy and fixing...')
        tmp = np.asfortranarray(labelarray)
        arr = tmp.reshape(-1, order='F')
        print('...done.')

    # Now add the label array to the image data
    vtkarray = np_s.numpy_to_vtk(arr)
    vtkarray.Association = dsa.ArrayAssociation.POINT
    do = dsa.WrapDataObject(dataobject)
    do.PointData.append(arr, "LabelMap")
    do.PointData.AddArray(vtkarray)


def get_tilt_angles(dataobject):
    """Return the 'tilt_angles' field-data array as a wrapped VTK array."""
    # Get the tilt angles array
    do = dsa.WrapDataObject(dataobject)
    rawarray = do.FieldData.GetArray('tilt_angles')
    vtkarray = dsa.vtkDataArrayToVTKArray(rawarray, do)
    vtkarray.Association = dsa.ArrayAssociation.FIELD
    return vtkarray


def set_tilt_angles(dataobject, newarray):
    """Replace the 'tilt_angles' field-data array with `newarray`."""
    # replace the tilt angles with the new array
    from vtk import VTK_DOUBLE
    # deep copy avoids having to keep numpy array around, but is more
    # expensive. I don't expect tilt_angles to be a big array though.
    vtkarray = np_s.numpy_to_vtk(newarray, deep=1, array_type=VTK_DOUBLE)
    vtkarray.Association = dsa.ArrayAssociation.FIELD
    vtkarray.SetName('tilt_angles')
    do = dsa.WrapDataObject(dataobject)
    do.FieldData.RemoveArray('tilt_angles')
    do.FieldData.AddArray(vtkarray)


def make_dataset(x, y, z, dataset, generate_data_function):
    """Initialize `dataset` as an (x, y, z) image whose scalars are filled
    in place by `generate_data_function`."""
    from vtk import vtkImageData, VTK_DOUBLE
    array = np.zeros((x, y, z), order='F')
    generate_data_function(array)
    dataset.SetOrigin(0, 0, 0)
    dataset.SetSpacing(1, 1, 1)
    dataset.SetExtent(0, x - 1, 0, y - 1, 0, z - 1)
    flat_array = array.reshape(-1, order='F')
    vtkarray = np_s.numpy_to_vtk(flat_array, deep=1, array_type=VTK_DOUBLE)
    vtkarray.SetName("generated_scalars")
    dataset.GetPointData().SetScalars(vtkarray)


def _mark_data_source_type(dataobject, value):
    """Tag the dataset's field data with tomviz_data_source_type = value,
    creating the tag array if needed. (Shared body of the mark_as_* pair,
    which were previously duplicated.)"""
    from vtk import vtkTypeInt8Array
    fd = dataobject.GetFieldData()
    arr = fd.GetArray("tomviz_data_source_type")
    if arr is None:
        arr = vtkTypeInt8Array()
        arr.SetNumberOfComponents(1)
        arr.SetNumberOfTuples(1)
        arr.SetName("tomviz_data_source_type")
        fd.AddArray(arr)
    arr.SetTuple1(0, value)


def mark_as_volume(dataobject):
    """Tag the dataset as a volume (type 0)."""
    _mark_data_source_type(dataobject, 0)


def mark_as_tiltseries(dataobject):
    """Tag the dataset as a tilt series (type 1)."""
    _mark_data_source_type(dataobject, 1)
34.180723
90
0.683938
1,122
8,511
5.01426
0.21836
0.038393
0.017064
0.023107
0.380732
0.357981
0.337007
0.278884
0.253288
0.224493
0
0.008471
0.209376
8,511
248
91
34.318548
0.827612
0.232523
0
0.484277
0
0
0.076746
0.01857
0
0
0
0
0
0
null
null
0.050314
0.050314
null
null
0.025157
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
2
300c798493d1e82afe47c5c6bc306c0188408a23
557
py
Python
tests/test_models/test_general_gnn.py
JonaBecher/spektral
ff59e16d959e0ec698428997363be20462625699
[ "MIT" ]
2,145
2019-01-21T20:49:44.000Z
2022-03-28T20:27:27.000Z
tests/test_models/test_general_gnn.py
jasper-park/spektral
ad2d96549c00f68ce992a7d29e2c3fd025fb529b
[ "MIT" ]
259
2019-01-22T05:18:19.000Z
2022-03-25T10:46:10.000Z
tests/test_models/test_general_gnn.py
jasper-park/spektral
ad2d96549c00f68ce992a7d29e2c3fd025fb529b
[ "MIT" ]
322
2019-02-11T16:18:27.000Z
2022-03-24T16:26:59.000Z
from spektral import models
from tests.test_models.core import MODES, run_model

# Baseline configuration for the GeneralGNN model tests.
config = {
    "model": models.GeneralGNN,
    "modes": [MODES["SINGLE"], MODES["DISJOINT"], MODES["MIXED"]],
    "kwargs": {"output": 32, "connectivity": "cat", "pool": "sum"},
    "edges": False,
    "dense": False,
    "sparse": True,
}


def test_model():
    """Run GeneralGNN under several kwarg variations.

    Note: mutates the shared `config` dict in place between runs.
    """
    run_model(config)

    # Disable pooling.
    config["kwargs"]["pool"] = None
    run_model(config)

    # Switch skip-connection merging from concatenation to summation.
    config["kwargs"]["connectivity"] = "sum"
    run_model(config)

    # Disable skip connections entirely.
    config["kwargs"]["connectivity"] = None
    run_model(config)
22.28
67
0.624776
64
557
5.328125
0.453125
0.117302
0.205279
0.175953
0.29912
0.222874
0
0
0
0
0
0.004425
0.18851
557
24
68
23.208333
0.75
0
0
0.222222
0
0
0.229803
0
0
0
0
0
0
1
0.055556
false
0
0.111111
0
0.166667
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
301329597f00de6f72f6e7f2bfa64758f2412e22
574
py
Python
tests/test_krow.py
AhmadNaruto/pyromenu
74f822771fd41bab74d8d0b9bb1e30222e55c74f
[ "MIT" ]
1
2022-01-30T16:27:00.000Z
2022-01-30T16:27:00.000Z
tests/test_krow.py
AhmadNaruto/pyromenu
74f822771fd41bab74d8d0b9bb1e30222e55c74f
[ "MIT" ]
null
null
null
tests/test_krow.py
AhmadNaruto/pyromenu
74f822771fd41bab74d8d0b9bb1e30222e55c74f
[ "MIT" ]
null
null
null
from typing import List
from unittest import TestCase

from pyrogram import KeyboardButton

from pyromenu import KRow
from pyromenu.fakes import FakeButton


class KRowTests(TestCase):
    """KRow class test case"""

    def setUp(self):
        # Fresh two-button row before every test.
        self.trow = KRow(FakeButton(), FakeButton())

    def test_keyboard_row(self):
        """keyboard_row method implementation test"""
        tkeyboard_row = self.trow.keyboard_row()

        # The row must be a list whose elements are all pyrogram
        # KeyboardButton instances.
        self.assertIsInstance(tkeyboard_row, List)
        for button in tkeyboard_row:
            self.assertIsInstance(button, KeyboardButton)
26.090909
57
0.714286
66
574
6.106061
0.439394
0.069479
0.074442
0
0
0
0
0
0
0
0
0
0.207317
574
21
58
27.333333
0.885714
0.10453
0
0
0
0
0
0
0
0
0
0
0.153846
1
0.153846
false
0
0.384615
0
0.615385
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
3013a5d8c455a21df689d3f31a5366f14c0c600d
472
py
Python
src/optim/__init__.py
pzzhang/sasa
e663d7666e85de8e5a7a664a6b37d988008ab007
[ "MIT" ]
1
2020-01-28T15:22:16.000Z
2020-01-28T15:22:16.000Z
src/optim/__init__.py
pzzhang/sasa
e663d7666e85de8e5a7a664a6b37d988008ab007
[ "MIT" ]
null
null
null
src/optim/__init__.py
pzzhang/sasa
e663d7666e85de8e5a7a664a6b37d988008ab007
[ "MIT" ]
1
2021-06-10T05:04:24.000Z
2021-06-10T05:04:24.000Z
# Copyright (c) Microsoft. All rights reserved. """ :mod:`torch.optim` is a package implementing various optimization algorithms. Most commonly used methods are already supported, and the interface is general enough, so that more sophisticated ones can be also easily integrated in the future. """ from .qhm import QHM from .salsa import SALSA from .sasa_xd import SASA_xd from .sasa_yaida import SASAYaida from .sgd_sls import SGD_SLS from .yaida_baseline import Yaida
29.5
78
0.798729
72
472
5.152778
0.708333
0.043127
0
0
0
0
0
0
0
0
0
0
0.146186
472
15
79
31.466667
0.920596
0.610169
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
2
3013b8932c06fdbcb2644a263893143714ac0d64
1,536
py
Python
Python/url_validator/url_validator.py
vietnguyen1989/UDx-Examples
4b72aaa7402c93890500d37c1b888230cc6524e2
[ "BSD-3-Clause" ]
12
2017-08-16T09:58:50.000Z
2020-05-08T12:03:57.000Z
Python/url_validator/url_validator.py
vietnguyen1989/UDx-Examples
4b72aaa7402c93890500d37c1b888230cc6524e2
[ "BSD-3-Clause" ]
1
2021-04-21T21:39:03.000Z
2021-04-21T21:39:03.000Z
Python/url_validator/url_validator.py
vietnguyen1989/UDx-Examples
4b72aaa7402c93890500d37c1b888230cc6524e2
[ "BSD-3-Clause" ]
15
2016-12-21T09:57:22.000Z
2021-11-25T22:30:24.000Z
import vertica_sdk
import urllib.request
# Robustness: import urllib.error explicitly instead of relying on
# urllib.request importing it as a side effect.
import urllib.error
import time


class validate_url(vertica_sdk.ScalarFunction):
    """Validates HTTP requests. Returns the status code of a webpage.
    Pages that cannot be accessed return "Failed to load page."
    """

    def __init__(self):
        pass

    def setup(self, server_interface, col_types):
        pass

    def processBlock(self, server_interface, arg_reader, res_writer):
        """Read one URL per input row and emit its HTTP status code (or an
        error string) per output row."""
        # Writes a string to the UDx log file.
        server_interface.log("Validating URL Accessibility - UDx")
        while(True):
            url = arg_reader.getString(0)
            try:
                status = urllib.request.urlopen(url).getcode()
                # Avoid overwhelming web servers -- be nice.
                time.sleep(2)
            except (ValueError, urllib.error.HTTPError, urllib.error.URLError):
                status = 'Failed to load page'
            res_writer.setString(str(status))
            res_writer.next()
            if not arg_reader.next():
                # Stop processing when there are no more input rows.
                break

    def destroy(self, server_interface, col_types):
        pass


class validate_url_factory(vertica_sdk.ScalarFunctionFactory):
    """Factory registering validate_url with the Vertica UDx framework."""

    def createScalarFunction(self, srv):
        # Bug fix: the original returned test_url(), a name that does not
        # exist in this module (NameError at runtime); the UDx class defined
        # above is validate_url.
        return validate_url()

    def getPrototype(self, srv_interface, arg_types, return_type):
        arg_types.addVarchar()
        return_type.addChar()

    def getReturnType(self, srv_interface, arg_types, return_type):
        return_type.addChar(20)
30.117647
79
0.638672
179
1,536
5.301676
0.541899
0.063224
0.060063
0.03372
0.136986
0.136986
0.071654
0
0
0
0
0.003623
0.28125
1,536
50
80
30.72
0.855978
0.166016
0
0.096774
0
0
0.042231
0
0
0
0
0
0
1
0.225806
false
0.096774
0.096774
0.032258
0.419355
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
2
3054517a8a4e7afa09e7b46f709d855947dc6bc7
1,130
py
Python
src/wai/annotations/xdc/od_to_is/specifier/_OD2ISXDCSpecifier.py
waikato-ufdl/wai-annotations-core
bac3429e9488efb456972c74f9d462f951c4af3d
[ "Apache-2.0" ]
null
null
null
src/wai/annotations/xdc/od_to_is/specifier/_OD2ISXDCSpecifier.py
waikato-ufdl/wai-annotations-core
bac3429e9488efb456972c74f9d462f951c4af3d
[ "Apache-2.0" ]
3
2021-06-30T23:42:47.000Z
2022-03-01T03:45:07.000Z
src/wai/annotations/xdc/od_to_is/specifier/_OD2ISXDCSpecifier.py
waikato-ufdl/wai-annotations-core
bac3429e9488efb456972c74f9d462f951c4af3d
[ "Apache-2.0" ]
null
null
null
from typing import Tuple, Type

from ....core.component import Component
from ....core.domain import DomainSpecifier
from ....core.specifier import ProcessorStageSpecifier


class OD2ISXDCSpecifier(ProcessorStageSpecifier):
    """
    Specifies the image object-detection -> image segmentation
    cross-domain converter.
    """
    @classmethod
    def description(cls) -> str:
        return "Converts image object-detection instances into image segmentation instances"

    @classmethod
    def domain_transfer_function(cls, input_domain: Type[DomainSpecifier]) -> Type[DomainSpecifier]:
        # Imported lazily to keep module import light.
        from ....domain.image.object_detection import ImageObjectDetectionDomainSpecifier
        from ....domain.image.segmentation import ImageSegmentationDomainSpecifier

        # Guard clause: anything other than object-detection is unsupported.
        if input_domain is not ImageObjectDetectionDomainSpecifier:
            raise Exception("OD -> IS XDC can only handle the image object-detection domain")

        return ImageSegmentationDomainSpecifier

    @classmethod
    def components(cls) -> Tuple[Type[Component], ...]:
        # Imported lazily to avoid a circular import at module load.
        from ..component import OD2ISXDC
        return OD2ISXDC,
37.666667
100
0.730973
107
1,130
7.672897
0.439252
0.053593
0.097442
0.056029
0
0
0
0
0
0
0
0.003279
0.190265
1,130
29
101
38.965517
0.893989
0.072566
0
0.15
0
0
0.133268
0
0
0
0
0
0
1
0.15
false
0
0.35
0.05
0.7
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
305669d5841dedd551c97b349d13fcfc315086a9
410
py
Python
bakery/main.py
AtanasovAtanas/python-advanced-jan-22
701c5a6e2c934d010250cbc4a1c72222afb3b897
[ "MIT" ]
2
2022-03-28T10:05:51.000Z
2022-03-31T04:36:30.000Z
bakery/main.py
AtanasovAtanas/python-advanced-jan-22
701c5a6e2c934d010250cbc4a1c72222afb3b897
[ "MIT" ]
null
null
null
bakery/main.py
AtanasovAtanas/python-advanced-jan-22
701c5a6e2c934d010250cbc4a1c72222afb3b897
[ "MIT" ]
3
2022-02-07T19:05:44.000Z
2022-03-21T11:34:11.000Z
from project.bakery import Bakery

# Smoke-test script exercising the Bakery API end to end.
bakery = Bakery('Random Name')

# Stock two drinks.
print(bakery.add_drink('Water', 'Mineral', 500, 'Bankya'))
print(bakery.add_drink('Tea', 'Ice', 500, 'Nestle'))

# Register a table (number 55, capacity 15).
print(bakery.add_table('OutsideTable', 55, 15))

# Table 10 was never added -- presumably exercises the error path;
# TODO confirm against Bakery.reserve_table.
print(bakery.reserve_table(10))

# Order a mix of stocked and unstocked drinks at table 55, then close out.
print(bakery.order_drink(55, 'Spring', 'Mineral', 'Ice', 'Coke', 'Fanta'))
print(bakery.leave_table(55))
print('---')
print(bakery.get_total_income())
31.538462
74
0.717073
58
410
4.931034
0.517241
0.269231
0.146853
0.132867
0
0
0
0
0
0
0
0.042216
0.07561
410
13
75
31.538462
0.712401
0
0
0
0
0
0.19708
0
0
0
0
0
0
1
0
false
0
0.1
0
0.1
0.8
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
2
305b46b49e1bd63c70736f60b2c4f5924c497d72
679
py
Python
punchline/music/forms.py
ychab/punchline
0054fd1a0c588173c9963714477b012579c13051
[ "MIT" ]
null
null
null
punchline/music/forms.py
ychab/punchline
0054fd1a0c588173c9963714477b012579c13051
[ "MIT" ]
null
null
null
punchline/music/forms.py
ychab/punchline
0054fd1a0c588173c9963714477b012579c13051
[ "MIT" ]
null
null
null
from django import forms

from dal import autocomplete

from .models import Punchline, Song


class SongAdminForm(forms.ModelForm):
    """Admin form for Song with an autocompleting album selector."""

    class Meta:
        model = Song
        fields = '__all__'
        widgets = {
            # Album choices come from the admin autocomplete endpoint.
            'album': autocomplete.ModelSelect2(url='admin:album-autocomplete'),
        }


class PunchlineAdminForm(forms.ModelForm):
    """Admin form for Punchline with autocompleting artist/song selectors."""

    class Meta:
        model = Punchline
        fields = '__all__'
        widgets = {
            'artist': autocomplete.ModelSelect2(url='admin:artist-autocomplete'),
            # The song autocomplete forwards the chosen artist so the song
            # list can be filtered by that artist.
            'song': autocomplete.ModelSelect2(
                url='admin:song-autocomplete',
                forward=['artist'],
            ),
        }
22.633333
81
0.589102
58
679
6.758621
0.413793
0.183673
0.206633
0.244898
0.142857
0
0
0
0
0
0
0.006397
0.309278
679
29
82
23.413793
0.829424
0
0
0.285714
0
0
0.157585
0.106038
0
0
0
0
0
1
0
false
0
0.142857
0
0.333333
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
3063cabc1fed643e256b296d0979708120e3a0ca
1,763
py
Python
cryomem/cmtools/lib/db_svjj_params.py
bebaek/cryomem
088fba2568d10451adda51a068c15c8c2a73d9ce
[ "MIT" ]
1
2018-09-16T12:29:04.000Z
2018-09-16T12:29:04.000Z
cryomem/cmtools/lib/db_svjj_params.py
bebaek/cryomem
088fba2568d10451adda51a068c15c8c2a73d9ce
[ "MIT" ]
null
null
null
cryomem/cmtools/lib/db_svjj_params.py
bebaek/cryomem
088fba2568d10451adda51a068c15c8c2a73d9ce
[ "MIT" ]
null
null
null
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
"""
Manage database of SV JJ parameters

File format: HDF5.

BB, 2015
"""
import numpy as np
import tables as tb


class JJParams(tb.IsDescription):
    """PyTables row description for one SV JJ measurement record."""
    wafer = tb.StringCol(16)
    chip = tb.StringCol(16)
    device = tb.StringCol(16)
    meastime = tb.StringCol(8)
    #meastime = tb.Time64()
    measequip = tb.StringCol(20)
    measmethod = tb.StringCol(50)
    shape = tb.StringCol(8)
    # NOTE(review): PyTables column types are usually Float64Col();
    # tb.Float64() is kept as in the original -- confirm against the
    # tables version in use.
    dimx = tb.Float64()
    dimy = tb.Float64()
    temperature = tb.Float64()
    ic_p = tb.Float64()    # critical current, parallel state
    ic_ap = tb.Float64()   # critical current, antiparallel state
    rn_p = tb.Float64()    # normal resistance, parallel state
    rn_ap = tb.Float64()   # normal resistance, antiparallel state


class DB():
    """Handle for the parameter database file (methods are stubs)."""

    def __init__(self, **kwargs):
        # Bug fix: the original signature omitted `self`, so the class could
        # never be instantiated.
        if 'filename' in kwargs:
            self.open_db(kwargs['filename'])

    def open_db(self, filename):
        # Bug fix: the original took no parameters (not even `self`) yet was
        # called as self.open_db(filename) from __init__.
        pass

    def close_db(self):
        pass

    def add_jj(self, **kwargs):
        pass

    def del_jj(self, **kwargs):
        pass

    def edit_jj(self, **kwargs):
        pass


def plot_svjj(filenames, **kwargs):
    """Plot SV JJ data loaded from the given files.

    The original body was syntactically invalid (`for fn = in filenames:`
    and an unterminated np.loadtxt call); reconstructed minimally here.
    """
    # Bug fix: kwargs is a dict; the original called it like a function.
    whichplot = kwargs.get('whichplot', 'hic')
    if whichplot == 'hic':
        # H vs Ic
        ix = 1; iy = 3
        for fn in filenames:
            # NOTE(review): the column selection and the actual plotting were
            # never completed in the original -- confirm intended usecols.
            data = np.loadtxt(fn, usecols=(ix, iy))


def app(args):
    """Command-line dispatcher: `python plot_cryomem.py <func> <filename>`."""
    if len(args) < 2:
        print('')
        print('Usage: python plot_cryomem.py <filename> [<arguments>]\n')
        print('Examples:')
        print('python daq_dipstick.py sqwave_field h=150')
        print('python daq_dipstick.py set_sweepvolt .7')
        sys.exit(0)
    func = args[1]
    fn = args[2]
    if func == 'iv':
        pass
    elif func == 'hic':
        # Bug fix: the original passed `data` (undefined at this point) to
        # np.loadtxt as the filename; `fn` is the filename argument.
        data = np.loadtxt(fn, skiprows=1, usecols=(2, 4))
        # NOTE(review): `plothyst` and `plt` are not imported anywhere in
        # this module -- confirm the missing imports before relying on this
        # branch.
        plothyst.plothyst(data[0], data[1])
        plt.show()


if __name__ == '__main__':
    import sys
    print(sys.version)
    app(sys.argv)
22.0375
73
0.571185
231
1,763
4.238095
0.480519
0.078652
0.039837
0.045965
0.04903
0
0
0
0
0
0
0.040898
0.292683
1,763
79
74
22.316456
0.744186
0
0
0.105263
0
0
0.117684
0
0
0
0
0
0
0
null
null
0.105263
0.052632
null
null
0.105263
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
2
06200c91fb6913a5df458dd50d95e6229c41ba2c
775
py
Python
bar.py
lcolon7423/Bar-Pi
e271c63762fe96cfd8ba492286172b90159ec105
[ "MIT" ]
null
null
null
bar.py
lcolon7423/Bar-Pi
e271c63762fe96cfd8ba492286172b90159ec105
[ "MIT" ]
null
null
null
bar.py
lcolon7423/Bar-Pi
e271c63762fe96cfd8ba492286172b90159ec105
[ "MIT" ]
null
null
null
import socket from datetime import datetime from flask import Flask, url_for, request, render_template; from flask_bootstrap import Bootstrap from flask_sqlalchemy import SQLAlchemy import time, os app = Flask(__name__) Bootstrap(app) app.config['SECRET_KEY'] = '5791628bb0b13ce0c676dfde280ba245' app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///drinks.db' db=SQLAlchemy(app) wsgi_app = app.wsgi_app #import routes from routes file from routes import *; hostname = socket.gethostname() IPAddr = socket.gethostbyname(hostname) if __name__ == "__main__": HOST = os.environ.get('SERVER_HOST' ,IPAddr) try: PORT =int(os.environ.get('SERVER_PORT' , '3871')) except ValueError: PORT = 5555 app.debug = True app.run(HOST, PORT)
20.945946
61
0.730323
99
775
5.484848
0.484848
0.049724
0.036832
0.066298
0
0
0
0
0
0
0
0.043277
0.165161
775
36
62
21.527778
0.795981
0.03871
0
0
0
0
0.158816
0.074024
0
0
0
0
0
0
null
null
0
0.304348
null
null
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
2
0621c2214bf0f3444abcec6f9b6364b7a4fb3074
506
py
Python
dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/ByteDataRecord.py
srcarter3/python-awips
d981062662968cf3fb105e8e23d955950ae2497e
[ "BSD-3-Clause" ]
33
2016-03-17T01:21:18.000Z
2022-02-08T10:41:06.000Z
dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/ByteDataRecord.py
srcarter3/python-awips
d981062662968cf3fb105e8e23d955950ae2497e
[ "BSD-3-Clause" ]
15
2016-04-19T16:34:08.000Z
2020-09-09T19:57:54.000Z
dynamicserialize/dstypes/com/raytheon/uf/common/datastorage/records/ByteDataRecord.py
Unidata/python-awips
8459aa756816e5a45d2e5bea534d23d5b1dd1690
[ "BSD-3-Clause" ]
20
2016-03-12T01:46:58.000Z
2022-02-08T06:53:22.000Z
from dynamicserialize.dstypes.com.raytheon.uf.common.datastorage.records import AbstractDataRecord class ByteDataRecord(AbstractDataRecord): def __init__(self): super(ByteDataRecord, self).__init__() self.byteData = None def getByteData(self): return self.byteData def setByteData(self, byteData): self.byteData = byteData def retrieveDataObject(self): return self.getByteData() def putDataObject(self, obj): self.setByteData(obj)
24.095238
98
0.703557
50
506
6.96
0.5
0.137931
0.08046
0
0
0
0
0
0
0
0
0
0.209486
506
20
99
25.3
0.87
0
0
0
0
0
0
0
0
0
0
0
0
1
0.384615
false
0
0.076923
0.153846
0.692308
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
062574ff00016e22dca24ba75ea3d11eafe89913
13,310
py
Python
ml4tc/utils/satellite_utils_test.py
NOAA-GSL/ml4tc
e9f8faa51e5bfb86b2a78648d7b1d0e61d09b6c0
[ "MIT" ]
2
2021-08-24T04:24:22.000Z
2021-09-29T07:52:21.000Z
ml4tc/utils/satellite_utils_test.py
thunderhoser/ml4tc
dd97972675c462634cf43fa9ad486049429095e9
[ "MIT" ]
null
null
null
ml4tc/utils/satellite_utils_test.py
thunderhoser/ml4tc
dd97972675c462634cf43fa9ad486049429095e9
[ "MIT" ]
null
null
null
"""Unit tests for satellite_utils.py.""" import unittest import numpy from ml4tc.utils import satellite_utils TOLERANCE = 1e-6 # The following constants are used to test _find_storm_center_px_space. GRID_LATITUDES_DEG_N = numpy.array( [-10, -8, -6, -4, -2, 0, 3, 6, 9, 12, 20], dtype=float ) GRID_LONGITUDES_DEG_E = numpy.array( [350, 355, 0, 5, 10, 15, 25, 35, 45, 55, 65, 75], dtype=float ) FIRST_STORM_LATITUDE_DEG_N = 6.9 FIRST_STORM_LONGITUDE_DEG_E = 354.3 FIRST_STORM_ROW = 7.5 FIRST_STORM_COLUMN = 0.5 SECOND_STORM_LATITUDE_DEG_N = 16.7 SECOND_STORM_LONGITUDE_DEG_E = 26.3 SECOND_STORM_ROW = 9.5 SECOND_STORM_COLUMN = 6.5 THIRD_STORM_LATITUDE_DEG_N = 11.9 THIRD_STORM_LONGITUDE_DEG_E = 35.5 THIRD_STORM_ROW = 8.5 THIRD_STORM_COLUMN = 7.5 FOURTH_STORM_LATITUDE_DEG_N = -1.5 FOURTH_STORM_LONGITUDE_DEG_E = 51.4 FOURTH_STORM_ROW = 4.5 FOURTH_STORM_COLUMN = 8.5 FIFTH_STORM_LATITUDE_DEG_N = 15.1 FIFTH_STORM_LONGITUDE_DEG_E = 7.6 FIFTH_STORM_ROW = 9.5 FIFTH_STORM_COLUMN = 3.5 # The following constants are used to test _crop_image_around_storm_center. 
UNCROPPED_DATA_MATRIX = numpy.array([ [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12], [13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24], [25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36], [37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48], [49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60], [61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72], [73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84], [85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96], [97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108], [109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120], [121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132] ], dtype=float) NUM_CROPPED_ROWS = 4 NUM_CROPPED_COLUMNS = 6 FIRST_CROPPED_DATA_MATRIX = numpy.array([ [73, 73, 73, 74, 75, 76], [85, 85, 85, 86, 87, 88], [97, 97, 97, 98, 99, 100], [109, 109, 109, 110, 111, 112] ], dtype=float) FIRST_CROPPED_LATITUDES_DEG_N = numpy.array([3, 6, 9, 12], dtype=float) FIRST_CROPPED_LONGITUDES_DEG_E = numpy.array( [340, 345, 350, 355, 0, 5], dtype=float ) SECOND_CROPPED_DATA_MATRIX = numpy.array([ [101, 102, 103, 104, 105, 106], [113, 114, 115, 116, 117, 118], [125, 126, 127, 128, 129, 130], [125, 126, 127, 128, 129, 130] ], dtype=float) SECOND_CROPPED_LATITUDES_DEG_N = numpy.array([9, 12, 20, 28], dtype=float) SECOND_CROPPED_LONGITUDES_DEG_E = numpy.array( [10, 15, 25, 35, 45, 55], dtype=float ) THIRD_CROPPED_DATA_MATRIX = numpy.array([ [90, 91, 92, 93, 94, 95], [102, 103, 104, 105, 106, 107], [114, 115, 116, 117, 118, 119], [126, 127, 128, 129, 130, 131] ], dtype=float) THIRD_CROPPED_LATITUDES_DEG_N = numpy.array([6, 9, 12, 20], dtype=float) THIRD_CROPPED_LONGITUDES_DEG_E = numpy.array( [15, 25, 35, 45, 55, 65], dtype=float ) FOURTH_CROPPED_DATA_MATRIX = numpy.array([ [43, 44, 45, 46, 47, 48], [55, 56, 57, 58, 59, 60], [67, 68, 69, 70, 71, 72], [79, 80, 81, 82, 83, 84] ], dtype=float) FOURTH_CROPPED_LATITUDES_DEG_N = numpy.array([-4, -2, 0, 3], dtype=float) FOURTH_CROPPED_LONGITUDES_DEG_E = numpy.array( [25, 35, 45, 55, 65, 75], 
dtype=float ) FIFTH_CROPPED_DATA_MATRIX = numpy.array([ [98, 99, 100, 101, 102, 103], [110, 111, 112, 113, 114, 115], [122, 123, 124, 125, 126, 127], [122, 123, 124, 125, 126, 127] ], dtype=float) FIFTH_CROPPED_LATITUDES_DEG_N = numpy.array([9, 12, 20, 28], dtype=float) FIFTH_CROPPED_LONGITUDES_DEG_E = numpy.array( [355, 0, 5, 10, 15, 25], dtype=float ) # The following constants are used to test get_cyclone_id and parse_cyclone_id. YEAR = 1998 BASIN_ID_STRING = 'AL' CYCLONE_NUMBER = 5 CYCLONE_ID_STRING = '1998AL05' class SatelliteUtilsTests(unittest.TestCase): """Each method is a unit test for satellite_utils.py.""" def test_find_storm_center_px_space_first(self): """Ensures correct output from _find_storm_center_px_space. In this case, using first storm center. """ this_row, this_column = satellite_utils._find_storm_center_px_space( storm_latitude_deg_n=FIRST_STORM_LATITUDE_DEG_N, storm_longitude_deg_e=FIRST_STORM_LONGITUDE_DEG_E, grid_latitudes_deg_n=GRID_LATITUDES_DEG_N + 0., grid_longitudes_deg_e=GRID_LONGITUDES_DEG_E + 0. ) self.assertTrue(this_row == FIRST_STORM_ROW) self.assertTrue(this_column == FIRST_STORM_COLUMN) def test_find_storm_center_px_space_second(self): """Ensures correct output from _find_storm_center_px_space. In this case, using second storm center. """ this_row, this_column = satellite_utils._find_storm_center_px_space( storm_latitude_deg_n=SECOND_STORM_LATITUDE_DEG_N, storm_longitude_deg_e=SECOND_STORM_LONGITUDE_DEG_E, grid_latitudes_deg_n=GRID_LATITUDES_DEG_N + 0., grid_longitudes_deg_e=GRID_LONGITUDES_DEG_E + 0. ) self.assertTrue(this_row == SECOND_STORM_ROW) self.assertTrue(this_column == SECOND_STORM_COLUMN) def test_find_storm_center_px_space_third(self): """Ensures correct output from _find_storm_center_px_space. In this case, using third storm center. 
""" this_row, this_column = satellite_utils._find_storm_center_px_space( storm_latitude_deg_n=THIRD_STORM_LATITUDE_DEG_N, storm_longitude_deg_e=THIRD_STORM_LONGITUDE_DEG_E, grid_latitudes_deg_n=GRID_LATITUDES_DEG_N + 0., grid_longitudes_deg_e=GRID_LONGITUDES_DEG_E + 0. ) self.assertTrue(this_row == THIRD_STORM_ROW) self.assertTrue(this_column == THIRD_STORM_COLUMN) def test_find_storm_center_px_space_fourth(self): """Ensures correct output from _find_storm_center_px_space. In this case, using fourth storm center. """ this_row, this_column = satellite_utils._find_storm_center_px_space( storm_latitude_deg_n=FOURTH_STORM_LATITUDE_DEG_N, storm_longitude_deg_e=FOURTH_STORM_LONGITUDE_DEG_E, grid_latitudes_deg_n=GRID_LATITUDES_DEG_N + 0., grid_longitudes_deg_e=GRID_LONGITUDES_DEG_E + 0. ) self.assertTrue(this_row == FOURTH_STORM_ROW) self.assertTrue(this_column == FOURTH_STORM_COLUMN) def test_find_storm_center_px_space_fifth(self): """Ensures correct output from _find_storm_center_px_space. In this case, using fifth storm center. """ this_row, this_column = satellite_utils._find_storm_center_px_space( storm_latitude_deg_n=FIFTH_STORM_LATITUDE_DEG_N, storm_longitude_deg_e=FIFTH_STORM_LONGITUDE_DEG_E, grid_latitudes_deg_n=GRID_LATITUDES_DEG_N + 0., grid_longitudes_deg_e=GRID_LONGITUDES_DEG_E + 0. ) self.assertTrue(this_row == FIFTH_STORM_ROW) self.assertTrue(this_column == FIFTH_STORM_COLUMN) def test_crop_image_around_storm_center_first(self): """Ensures correct output from _crop_image_around_storm_center. In this case, using first storm center. 
""" ( this_data_matrix, these_latitudes_deg_n, these_longitudes_deg_e ) = satellite_utils._crop_image_around_storm_center( data_matrix=UNCROPPED_DATA_MATRIX + 0., grid_latitudes_deg_n=GRID_LATITUDES_DEG_N + 0., grid_longitudes_deg_e=GRID_LONGITUDES_DEG_E + 0., storm_row=FIRST_STORM_ROW, storm_column=FIRST_STORM_COLUMN, num_cropped_rows=NUM_CROPPED_ROWS, num_cropped_columns=NUM_CROPPED_COLUMNS ) self.assertTrue(numpy.allclose( this_data_matrix, FIRST_CROPPED_DATA_MATRIX, atol=TOLERANCE )) self.assertTrue(numpy.allclose( these_latitudes_deg_n, FIRST_CROPPED_LATITUDES_DEG_N, atol=TOLERANCE )) self.assertTrue(numpy.allclose( these_longitudes_deg_e, FIRST_CROPPED_LONGITUDES_DEG_E, atol=TOLERANCE )) def test_crop_image_around_storm_center_second(self): """Ensures correct output from _crop_image_around_storm_center. In this case, using second storm center. """ ( this_data_matrix, these_latitudes_deg_n, these_longitudes_deg_e ) = satellite_utils._crop_image_around_storm_center( data_matrix=UNCROPPED_DATA_MATRIX + 0., grid_latitudes_deg_n=GRID_LATITUDES_DEG_N + 0., grid_longitudes_deg_e=GRID_LONGITUDES_DEG_E + 0., storm_row=SECOND_STORM_ROW, storm_column=SECOND_STORM_COLUMN, num_cropped_rows=NUM_CROPPED_ROWS, num_cropped_columns=NUM_CROPPED_COLUMNS ) self.assertTrue(numpy.allclose( this_data_matrix, SECOND_CROPPED_DATA_MATRIX, atol=TOLERANCE )) self.assertTrue(numpy.allclose( these_latitudes_deg_n, SECOND_CROPPED_LATITUDES_DEG_N, atol=TOLERANCE )) self.assertTrue(numpy.allclose( these_longitudes_deg_e, SECOND_CROPPED_LONGITUDES_DEG_E, atol=TOLERANCE )) def test_crop_image_around_storm_center_third(self): """Ensures correct output from _crop_image_around_storm_center. In this case, using third storm center. 
""" ( this_data_matrix, these_latitudes_deg_n, these_longitudes_deg_e ) = satellite_utils._crop_image_around_storm_center( data_matrix=UNCROPPED_DATA_MATRIX + 0., grid_latitudes_deg_n=GRID_LATITUDES_DEG_N + 0., grid_longitudes_deg_e=GRID_LONGITUDES_DEG_E + 0., storm_row=THIRD_STORM_ROW, storm_column=THIRD_STORM_COLUMN, num_cropped_rows=NUM_CROPPED_ROWS, num_cropped_columns=NUM_CROPPED_COLUMNS ) self.assertTrue(numpy.allclose( this_data_matrix, THIRD_CROPPED_DATA_MATRIX, atol=TOLERANCE )) self.assertTrue(numpy.allclose( these_latitudes_deg_n, THIRD_CROPPED_LATITUDES_DEG_N, atol=TOLERANCE )) self.assertTrue(numpy.allclose( these_longitudes_deg_e, THIRD_CROPPED_LONGITUDES_DEG_E, atol=TOLERANCE )) def test_crop_image_around_storm_center_fourth(self): """Ensures correct output from _crop_image_around_storm_center. In this case, using fourth storm center. """ ( this_data_matrix, these_latitudes_deg_n, these_longitudes_deg_e ) = satellite_utils._crop_image_around_storm_center( data_matrix=UNCROPPED_DATA_MATRIX + 0., grid_latitudes_deg_n=GRID_LATITUDES_DEG_N + 0., grid_longitudes_deg_e=GRID_LONGITUDES_DEG_E + 0., storm_row=FOURTH_STORM_ROW, storm_column=FOURTH_STORM_COLUMN, num_cropped_rows=NUM_CROPPED_ROWS, num_cropped_columns=NUM_CROPPED_COLUMNS ) self.assertTrue(numpy.allclose( this_data_matrix, FOURTH_CROPPED_DATA_MATRIX, atol=TOLERANCE )) self.assertTrue(numpy.allclose( these_latitudes_deg_n, FOURTH_CROPPED_LATITUDES_DEG_N, atol=TOLERANCE )) self.assertTrue(numpy.allclose( these_longitudes_deg_e, FOURTH_CROPPED_LONGITUDES_DEG_E, atol=TOLERANCE )) def test_crop_image_around_storm_center_fifth(self): """Ensures correct output from _crop_image_around_storm_center. In this case, using fifth storm center. 
""" ( this_data_matrix, these_latitudes_deg_n, these_longitudes_deg_e ) = satellite_utils._crop_image_around_storm_center( data_matrix=UNCROPPED_DATA_MATRIX + 0., grid_latitudes_deg_n=GRID_LATITUDES_DEG_N + 0., grid_longitudes_deg_e=GRID_LONGITUDES_DEG_E + 0., storm_row=FIFTH_STORM_ROW, storm_column=FIFTH_STORM_COLUMN, num_cropped_rows=NUM_CROPPED_ROWS, num_cropped_columns=NUM_CROPPED_COLUMNS ) self.assertTrue(numpy.allclose( this_data_matrix, FIFTH_CROPPED_DATA_MATRIX, atol=TOLERANCE )) self.assertTrue(numpy.allclose( these_latitudes_deg_n, FIFTH_CROPPED_LATITUDES_DEG_N, atol=TOLERANCE )) self.assertTrue(numpy.allclose( these_longitudes_deg_e, FIFTH_CROPPED_LONGITUDES_DEG_E, atol=TOLERANCE )) def test_get_cyclone_id(self): """Ensures correct output from get_cyclone_id.""" this_id_string = satellite_utils.get_cyclone_id( year=YEAR, basin_id_string=BASIN_ID_STRING, cyclone_number=CYCLONE_NUMBER ) self.assertTrue(this_id_string == CYCLONE_ID_STRING) def test_parse_cyclone_id(self): """Ensures correct output from parse_cyclone_id.""" this_year, this_basin_id_string, this_cyclone_number = ( satellite_utils.parse_cyclone_id(CYCLONE_ID_STRING) ) self.assertTrue(this_year == YEAR) self.assertTrue(this_basin_id_string == BASIN_ID_STRING) self.assertTrue(this_cyclone_number == CYCLONE_NUMBER) if __name__ == '__main__': unittest.main()
35.398936
80
0.679113
1,877
13,310
4.37773
0.120405
0.027261
0.064866
0.043447
0.822685
0.747596
0.621151
0.592187
0.553973
0.534502
0
0.079812
0.231856
13,310
375
81
35.493333
0.723885
0.105334
0
0.348315
0
0
0.001543
0
0
0
0
0
0.108614
1
0.044944
false
0
0.011236
0
0.059925
0
0
0
0
null
0
0
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
0640701dcd1a146ead813b007918b9b7e6006269
1,208
py
Python
sequence_labeling/Main.py
ChrisChatz/NLP_Projects
5e272ec8260ef8840f3e7cd75f1170df13a811c4
[ "MIT" ]
null
null
null
sequence_labeling/Main.py
ChrisChatz/NLP_Projects
5e272ec8260ef8840f3e7cd75f1170df13a811c4
[ "MIT" ]
null
null
null
sequence_labeling/Main.py
ChrisChatz/NLP_Projects
5e272ec8260ef8840f3e7cd75f1170df13a811c4
[ "MIT" ]
null
null
null
from nltk.corpus import europarl_raw from nltk.tokenize import sent_tokenize import Random_Spelling_Errors,Viterbi_decoder,Language_Model,Tools #load probabilities of bigram model and vocabulary from training data print "Train language model" prob,voc=Language_Model.probabilitiesAndVoc() print "Training done" print "------------------------------------------------------------------------------" test_data=europarl_raw.english.raw('europarl-v7.el-en.en') sentences=[sent for sent in sent_tokenize(test_data[:4000])] newSentences=[] for s in sentences: newSentences.append(s) for sent in newSentences: print "Test Sentence: %s" %sent print "-------------" #Introduce random spelling errors in test dataset testSentences=Random_Spelling_Errors.randomSpellingErrors(sent) print "Error Sentence: %s" %testSentences print "-------------" #Call viterbi decoder to correct spelling errors correctedSentence=Viterbi_decoder.viterbiAlgorithm(testSentences,prob,voc) print "Corrected Sentence: %s" %correctedSentence Tools.compareSentences(sent,testSentences,correctedSentence) print "--------------------------------------------------------------------------"
40.266667
86
0.669702
127
1,208
6.259843
0.433071
0.07044
0.075472
0
0
0
0
0
0
0
0
0.004677
0.115066
1,208
29
87
41.655172
0.739008
0.134934
0
0.090909
0
0
0.279271
0.145873
0
0
0
0
0
0
null
null
0
0.136364
null
null
0.409091
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
2
064106c193787fe6f91ab64be4ae4769b0984e37
496
py
Python
LeetCode/LongestUncommonSubsequenceI.py
SelvorWhim/competitive
b9daaf21920d6f7669dc0c525e903949f4e33b62
[ "Unlicense" ]
null
null
null
LeetCode/LongestUncommonSubsequenceI.py
SelvorWhim/competitive
b9daaf21920d6f7669dc0c525e903949f4e33b62
[ "Unlicense" ]
null
null
null
LeetCode/LongestUncommonSubsequenceI.py
SelvorWhim/competitive
b9daaf21920d6f7669dc0c525e903949f4e33b62
[ "Unlicense" ]
null
null
null
# idea: the setup sounds complicated but actually reduces to a trivial problem. If the strings are not identical, the longer of the two strings can't be a subsequence of the other (or either one if they are the same length but not identical), and if they are identical, there is no uncommon subsequence class Solution: def findLUSlength(self, a, b): """ :type a: str :type b: str :rtype: int """ return -1 if (a == b) else max(len(a), len(b))
45.090909
302
0.655242
80
496
4.0625
0.6375
0.073846
0.055385
0
0
0
0
0
0
0
0
0.002747
0.266129
496
10
303
49.6
0.89011
0.683468
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
0649874dce27af534ab2e9893eb4c3bc0a97cfcc
8,250
py
Python
tests/integration/states/test_vmc_nat_rules.py
kdsalvy/salt-ext-modules-vmware-1
9fdc941692e4c526f575f33b2ce23c1470582934
[ "Apache-2.0" ]
10
2021-11-02T20:24:44.000Z
2022-03-11T05:54:27.000Z
tests/integration/states/test_vmc_nat_rules.py
waynew/salt-ext-modules-vmware
9f693382772061676c846c850df6ff508b7f3a91
[ "Apache-2.0" ]
83
2021-10-01T15:13:02.000Z
2022-03-31T16:22:40.000Z
tests/integration/states/test_vmc_nat_rules.py
waynew/salt-ext-modules-vmware
9f693382772061676c846c850df6ff508b7f3a91
[ "Apache-2.0" ]
15
2021-09-30T23:17:27.000Z
2022-03-23T06:54:22.000Z
""" Integration Tests for vmc_nat_rules state module """ import json import pytest import requests from saltext.vmware.utils import vmc_request @pytest.fixture() def nat_rule_test_data(): tier1 = "cgw" nat = "USER" nat_rule = "NAT_RULE2" translated_network = "192.168.1.1" source_network = "10.117.5.73" updated_display_name = "rule-1" updated_tags = [{"tag": "tag1", "scope": "scope1"}] return ( tier1, nat, nat_rule, translated_network, source_network, updated_display_name, updated_tags, ) @pytest.fixture def request_headers(common_data): return vmc_request.get_headers(common_data["refresh_key"], common_data["authorization_host"]) @pytest.fixture def nat_rule_url(common_data): url = ( "https://{hostname}/vmc/reverse-proxy/api/orgs/{org_id}/sddcs/{sddc_id}/" "policy/api/v1/infra/tier-1s/{tier1}/nat/{nat}/nat-rules/{nat_rule}" ) api_url = url.format( hostname=common_data["hostname"], org_id=common_data["org_id"], sddc_id=common_data["sddc_id"], tier1=common_data["tier1"], nat=common_data["nat"], nat_rule=common_data["nat_rule"], ) return api_url @pytest.fixture def nat_rules_list_url(common_data): url = ( "https://{hostname}/vmc/reverse-proxy/api/orgs/{org_id}/sddcs/{sddc_id}/" "policy/api/v1/infra/tier-1s/{tier1}/nat/{nat}/nat-rules" ) api_url = url.format( hostname=common_data["hostname"], org_id=common_data["org_id"], sddc_id=common_data["sddc_id"], tier1=common_data["tier1"], nat=common_data["nat"], ) return api_url @pytest.fixture def common_data(vmc_nsx_connect): hostname, refresh_key, authorization_host, org_id, sddc_id, verify_ssl, cert = vmc_nsx_connect data = { "hostname": hostname, "refresh_key": refresh_key, "authorization_host": authorization_host, "org_id": org_id, "sddc_id": sddc_id, "tier1": "cgw", "nat": "USER", "nat_rule": "NAT_RULE2", "verify_ssl": verify_ssl, "cert": cert, } yield data @pytest.fixture def get_nat_rules(common_data, nat_rules_list_url, request_headers): session = requests.Session() response = session.get( url=nat_rules_list_url, 
verify=common_data["cert"] if common_data["verify_ssl"] else False, headers=request_headers, ) response.raise_for_status() return response.json() @pytest.fixture def delete_nat_rule(get_nat_rules, nat_rule_url, request_headers, common_data): """ Sets up test requirements: Queries vmc api for nat rules Deletes nat rule if exists """ for result in get_nat_rules.get("results", []): if result["id"] == common_data["nat_rule"]: session = requests.Session() response = session.delete( url=nat_rule_url, verify=common_data["cert"] if common_data["verify_ssl"] else False, headers=request_headers, ) # raise error if any response.raise_for_status() def test_vmc_nat_rules_state_module( salt_call_cli, delete_nat_rule, vmc_nsx_connect, nat_rule_test_data ): # Invoke present state to create nat rule hostname, refresh_key, authorization_host, org_id, sddc_id, verify_ssl, cert = vmc_nsx_connect ( tier1, nat, nat_rule, translated_network, source_network, updated_display_name, updated_tags, ) = nat_rule_test_data response = salt_call_cli.run( "state.single", "vmc_nat_rules.present", name="present", hostname=hostname, refresh_key=refresh_key, authorization_host=authorization_host, org_id=org_id, sddc_id=sddc_id, tier1=tier1, nat=nat, nat_rule=nat_rule, verify_ssl=verify_ssl, cert=cert, source_network=source_network, translated_network=translated_network, ) response_json = response.json result = list(response_json.values())[0] changes = result["changes"] assert changes["old"] is None assert changes["new"]["id"] == nat_rule assert result["comment"] == "Created nat rule {}".format(nat_rule) # Test present to update with identical fields response = salt_call_cli.run( "state.single", "vmc_nat_rules.present", name="present", hostname=hostname, refresh_key=refresh_key, authorization_host=authorization_host, org_id=org_id, sddc_id=sddc_id, tier1=tier1, nat=nat, nat_rule=nat_rule, verify_ssl=verify_ssl, cert=cert, source_network=source_network, translated_network=translated_network, ) 
response_json = response.json result = list(response_json.values())[0] changes = result["changes"] # assert no changes are done assert changes == {} assert result["comment"] == "Nat rule exists already, no action to perform" # Invoke present state to update nat rule with new display_name response = salt_call_cli.run( "state.single", "vmc_nat_rules.present", name="present", hostname=hostname, refresh_key=refresh_key, authorization_host=authorization_host, org_id=org_id, sddc_id=sddc_id, tier1=tier1, nat=nat, nat_rule=nat_rule, verify_ssl=verify_ssl, cert=cert, source_network=source_network, translated_network=translated_network, display_name=updated_display_name, ) response_json = response.json result = list(response_json.values())[0] changes = result["changes"] assert changes["old"]["display_name"] != changes["new"]["display_name"] assert changes["new"]["display_name"] == updated_display_name assert result["comment"] == "Updated nat rule {}".format(nat_rule) # Invoke present state to update nat rule with tags field response = salt_call_cli.run( "state.single", "vmc_nat_rules.present", name="present", hostname=hostname, refresh_key=refresh_key, authorization_host=authorization_host, org_id=org_id, sddc_id=sddc_id, tier1=tier1, nat=nat, nat_rule=nat_rule, verify_ssl=verify_ssl, cert=cert, source_network=source_network, translated_network=translated_network, tags=updated_tags, ) response_json = response.json result = list(response_json.values())[0] changes = result["changes"] assert changes["new"]["tags"] == updated_tags assert result["comment"] == "Updated nat rule {}".format(nat_rule) # Invoke absent to delete the nat rule response = salt_call_cli.run( "state.single", "vmc_nat_rules.absent", name="absent", hostname=hostname, refresh_key=refresh_key, authorization_host=authorization_host, org_id=org_id, sddc_id=sddc_id, tier1=tier1, nat=nat, nat_rule=nat_rule, verify_ssl=verify_ssl, cert=cert, ) response_json = response.json result = list(response_json.values())[0] 
changes = result["changes"] assert changes["new"] is None assert changes["old"]["id"] == nat_rule assert result["comment"] == "Deleted nat rule {}".format(nat_rule) # Invoke absent when nat rule is not present response = salt_call_cli.run( "state.single", "vmc_nat_rules.absent", name="absent", hostname=hostname, refresh_key=refresh_key, authorization_host=authorization_host, org_id=org_id, sddc_id=sddc_id, tier1=tier1, nat=nat, nat_rule=nat_rule, verify_ssl=verify_ssl, cert=cert, ) response_json = response.json result = list(response_json.values())[0] changes = result["changes"] # assert no changes are done assert changes == {} assert result["comment"] == "No nat rule found with Id {}".format(nat_rule)
29.151943
98
0.633455
1,018
8,250
4.844794
0.128684
0.066707
0.029197
0.024534
0.758719
0.709651
0.681671
0.673966
0.649027
0.649027
0
0.008639
0.256364
8,250
282
99
29.255319
0.795273
0.059273
0
0.683544
0
0.008439
0.142265
0.026561
0
0
0
0
0.063291
1
0.033755
false
0
0.016878
0.004219
0.07173
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
064af9960754733c4a485d674ed2ed729d27d9e9
241
py
Python
7 kyu/Complete The Pattern 7 Cyclical Permutation.py
mwk0408/codewars_solutions
9b4f502b5f159e68024d494e19a96a226acad5e5
[ "MIT" ]
6
2020-09-03T09:32:25.000Z
2020-12-07T04:10:01.000Z
7 kyu/Complete The Pattern 7 Cyclical Permutation.py
mwk0408/codewars_solutions
9b4f502b5f159e68024d494e19a96a226acad5e5
[ "MIT" ]
1
2021-12-13T15:30:21.000Z
2021-12-13T15:30:21.000Z
7 kyu/Complete The Pattern 7 Cyclical Permutation.py
mwk0408/codewars_solutions
9b4f502b5f159e68024d494e19a96a226acad5e5
[ "MIT" ]
null
null
null
from math import log def pattern(n): string="".join([str(i) for i in range(1,n+1)]) res="" index=0 for i in range(n): if i: index+=int(log(i,10)) res+=string[i+index:]+string[:i+index]+"\n" return res[:-1]
26.777778
51
0.547718
44
241
3
0.5
0.136364
0.090909
0.166667
0
0
0
0
0
0
0
0.032787
0.240664
241
9
52
26.777778
0.688525
0
0
0
0
0
0.008264
0
0
0
0
0
0
1
0.111111
false
0
0.111111
0
0.333333
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
064dff5f51ba8bc0f972768eed3bb56ae7fc0190
113
py
Python
music/__model/integer/audio/d/shp/ds0.py
jedhsu/music
dea68c4a82296cd4910e786f533b2cbf861377c3
[ "MIT" ]
null
null
null
music/__model/integer/audio/d/shp/ds0.py
jedhsu/music
dea68c4a82296cd4910e786f533b2cbf861377c3
[ "MIT" ]
null
null
null
music/__model/integer/audio/d/shp/ds0.py
jedhsu/music
dea68c4a82296cd4910e786f533b2cbf861377c3
[ "MIT" ]
null
null
null
""" *D♯ - Level 0* """ from ..._pitch import Pitch __all__ = ["Ds0"] class Ds0( Pitch, ): pass
7.0625
27
0.486726
14
113
3.642857
0.785714
0
0
0
0
0
0
0
0
0
0
0.038961
0.318584
113
15
28
7.533333
0.61039
0.123894
0
0
0
0
0.035294
0
0
0
0
0
0
1
0
false
0.166667
0.166667
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2
064f2c350cd53237a6f432b0a2c88ec55920662b
2,013
py
Python
gpslocation/migrations/0001_initial.py
VekotinVerstas/DjangoHttpBroker-GpsLocation
462f2370bb3171ed34694ad341354eb5b4c8e211
[ "MIT" ]
null
null
null
gpslocation/migrations/0001_initial.py
VekotinVerstas/DjangoHttpBroker-GpsLocation
462f2370bb3171ed34694ad341354eb5b4c8e211
[ "MIT" ]
null
null
null
gpslocation/migrations/0001_initial.py
VekotinVerstas/DjangoHttpBroker-GpsLocation
462f2370bb3171ed34694ad341354eb5b4c8e211
[ "MIT" ]
null
null
null
# Generated by Django 2.2 on 2019-04-27 12:49 from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ('broker', '0007_auto_20190427_1224'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Trackpoint', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('status', models.IntegerField(default=1)), ('time', models.DateTimeField(db_index=True)), ('lat', models.FloatField()), ('lon', models.FloatField()), ('speed', models.FloatField(blank=True, null=True)), ('course', models.FloatField(blank=True, null=True)), ('ele', models.FloatField(blank=True, null=True)), ('hacc', models.FloatField(blank=True, null=True)), ('vacc', models.FloatField(blank=True, null=True)), ('hdop', models.FloatField(blank=True, null=True)), ('vdop', models.FloatField(blank=True, null=True)), ('pdop', models.FloatField(blank=True, null=True)), ('tdop', models.FloatField(blank=True, null=True)), ('sat', models.IntegerField(blank=True, null=True)), ('satavail', models.IntegerField(blank=True, null=True)), ('created_at', models.DateTimeField(auto_now_add=True)), ('datalogger', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='broker.Datalogger')), ('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], options={ 'unique_together': {('datalogger', 'time')}, }, ), ]
43.76087
141
0.582216
205
2,013
5.629268
0.390244
0.093588
0.135182
0.176776
0.385615
0.385615
0.067591
0.067591
0
0
0
0.021031
0.26776
2,013
45
142
44.733333
0.761872
0.021361
0
0
1
0
0.090447
0.011687
0
0
0
0
0
1
0
false
0
0.078947
0
0.184211
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
06563550a45959f7b7c60ff0e3d503579d489536
3,759
py
Python
crystalsearch/graph/graph.py
jingshenSN2/CrystalTool
18f07963ff5f2a54ac2c93e2fa59fada51346232
[ "MIT" ]
null
null
null
crystalsearch/graph/graph.py
jingshenSN2/CrystalTool
18f07963ff5f2a54ac2c93e2fa59fada51346232
[ "MIT" ]
null
null
null
crystalsearch/graph/graph.py
jingshenSN2/CrystalTool
18f07963ff5f2a54ac2c93e2fa59fada51346232
[ "MIT" ]
null
null
null
import networkx as nx
import numpy as np


def project3d(points, direction):
    """
    Project a set of 3D points onto a 2D plane.

    The in-plane y direction is the projection of the z axis (if the plane's
    normal is the z axis itself, the y direction is the projection of the
    x axis instead).

    :param points: mapping of key -> 3D coordinate
    :param direction: normal vector (u, v, w) of the projection plane; the
        plane passes through the origin (0, 0, 0)
    :return: mapping of key -> 2D coordinate in the plane
    """
    d = direction / np.linalg.norm(direction)
    # Pick a reference axis that is not parallel to the normal.
    y0 = np.array([1, 0, 0]) if np.array([0, 0, 1]).dot(d) == 1 else np.array([0, 0, 1])
    # Remove the component along the normal, then normalize: in-plane y axis.
    y1 = y0 - np.dot(d, y0) * d
    norm_y = y1 / np.linalg.norm(y1)
    # In-plane x axis, orthogonal to both y and the normal.
    x0 = np.cross(norm_y, d)
    norm_x = x0 / np.linalg.norm(x0)
    pos = {}
    for k in points:
        p0 = np.array(points[k])
        # Project the point into the plane, then express it in (y, x) axes.
        p1 = p0 - np.dot(d, p0) * d
        pos[k] = (np.dot(norm_y, p1), np.dot(norm_x, p1))
    return pos


class Graph:
    """
    Wrapper class around nx.Graph.
    """

    def __init__(self, name, nx_graph=None):
        self.name = name
        self.info = {}
        self.g = nx.Graph(nx_graph)

    def __len__(self):
        return len(self.nodes())

    def __getitem__(self, node):
        return self.g[node]

    def copy(self):
        # Shallow wrapper copy; nx.Graph(...) in __init__ copies the graph data.
        return Graph(self.name, self.g)

    def add_node(self, node, **attr):
        self.g.add_node(node, **attr)

    def add_edge(self, node1, node2, **attr):
        self.g.add_edge(node1, node2, **attr)

    def remove_node(self, node):
        self.g.remove_node(node)

    def nodes(self):
        return self.g.nodes

    def edges(self):
        return self.g.edges

    def degree(self, node=None):
        # With a node: that node's degree; without: the whole degree view.
        if node is not None:
            return self.g.degree[node]
        return self.g.degree

    def subgraph(self, nodes):
        return Graph(self.name, self.g.subgraph(nodes))

    def max_subgraph(self):
        # Largest connected component, wrapped as a new Graph.
        mc = max(nx.connected_components(self.g), key=len)
        return Graph(self.name, self.g.subgraph(mc))

    def is_connected(self):
        return nx.is_connected(self.g)

    def get_node_attributes(self, attr):
        return nx.get_node_attributes(self.g, attr)

    def get_edge_attributes(self, attr):
        return nx.get_edge_attributes(self.g, attr)

    def draw_graph(self, axes, highlight=None, direction=(0, 0, 1), rotation=None):
        """Draw the 2D projection of the graph with matplotlib."""
        axes.clear()
        points = self.get_node_attributes('location')
        # Optionally rotate the 3D coordinates before projecting.
        if rotation is not None:
            for k in points:
                points[k] = np.dot(points[k], rotation)
        pos = project3d(points, np.array(direction))
        label = self.get_node_attributes('label')
        edge_label = self.get_edge_attributes('dist')
        nx.draw_networkx(self.g, pos, alpha=0.7, with_labels=False, edge_color='.4', ax=axes)
        # Re-draw the highlighted nodes in red on top of the base drawing.
        if highlight is not None:
            nx.draw_networkx_nodes(self.g, pos=pos, nodelist=highlight, node_color='r', ax=axes)
        nx.draw_networkx_labels(self.g, pos, labels=label, ax=axes)
        nx.draw_networkx_edge_labels(self.g, pos, edge_labels=edge_label, ax=axes)
        axes.axis('off')

    def draw_3d_graph(self, axes, highlight=None):
        """Draw the graph in 3D with matplotlib."""
        axes.clear()
        points = self.get_node_attributes('location')
        label = self.get_node_attributes('label')
        if highlight is None:
            highlight = []
        for key, value in points.items():
            c = 'blue'  # ordinary atoms are blue
            if key in highlight:
                c = 'red'  # highlighted atoms are shown in red
            xi, yi, zi = value
            axes.scatter(xi, yi, zi, label[key], c=c, alpha=0.9)
        for i, j in enumerate(self.edges()):
            # Draw each chemical bond as a line between the coordinates of
            # the two end atoms.
            x = np.array((points[j[0]][0], points[j[1]][0]))
            y = np.array((points[j[0]][1], points[j[1]][1]))
            z = np.array((points[j[0]][2], points[j[1]][2]))
            axes.plot(x, y, z, c='black', alpha=0.9)

    def number_of_edges(self, u, v):
        return self.g.number_of_edges(u, v)
31.325
96
0.5834
550
3,759
3.863636
0.223636
0.049412
0.031059
0.039529
0.233882
0.139294
0.071529
0.041412
0
0
0
0.020833
0.272147
3,759
119
97
31.588235
0.755848
0.054536
0
0.094118
0
0
0.013718
0
0
0
0
0
0
1
0.223529
false
0
0.023529
0.117647
0.423529
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
065808dc4f4e1a8a55935330732053b91dc07114
277
py
Python
Chapter07/Chp7-Program-2.py
IBM-CSM/Cognative
0ff85d2d01664234d47eb9a168c4b49fd18d8941
[ "MIT" ]
4
2019-09-13T12:36:37.000Z
2021-10-01T23:23:52.000Z
Chapter07/Chp7-Program-2.py
urantialife/Cognitive-Computing-with-IBM-Watson
1a158ac45a7315efb9f5fc2bdb7878a668714446
[ "MIT" ]
null
null
null
Chapter07/Chp7-Program-2.py
urantialife/Cognitive-Computing-with-IBM-Watson
1a158ac45a7315efb9f5fc2bdb7878a668714446
[ "MIT" ]
6
2019-07-06T00:36:08.000Z
2022-02-21T20:01:37.000Z
# NOTE(review): `natural_language_understanding` is presumably an IBM Watson
# Natural Language Understanding client created earlier in the program, and
# Features / CategoriesOptions / ConceptsOptions come from the Watson SDK —
# confirm against the surrounding code.
# Analyze the SpaceX Wikipedia page, requesting up to 4 categories and up to
# 10 concepts; clean=False presumably disables the service's input cleanup —
# verify against the NLU API reference.
response = natural_language_understanding.analyze(
    url="https://en.wikipedia.org/wiki/SpaceX",
    features=Features(
        categories=CategoriesOptions(limit=4),
        concepts=ConceptsOptions(limit=10)),
    clean=False
).get_result()
34.625
54
0.635379
26
277
6.653846
0.923077
0
0
0
0
0
0
0
0
0
0
0.014423
0.249097
277
7
55
39.571429
0.817308
0
0
0
0
0
0.129964
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
06598b65257213f7f6c368bf5df90050d20a5e75
823
py
Python
fexm/docker_scripts/docker_wrapper.py
fgsect/fexm
cf213c9dea3778c09c1d475e6a16b9db78a6f1e6
[ "Apache-2.0" ]
105
2018-08-09T22:13:59.000Z
2022-03-26T23:24:20.000Z
fexm/docker_scripts/docker_wrapper.py
DeadManINDIA/fexm
ca6629bbcbf79639871d3ec52bc2a7de9ae453a4
[ "Apache-2.0" ]
13
2018-08-23T13:40:04.000Z
2022-03-11T23:28:00.000Z
fexm/docker_scripts/docker_wrapper.py
DeadManINDIA/fexm
ca6629bbcbf79639871d3ec52bc2a7de9ae453a4
[ "Apache-2.0" ]
25
2018-08-09T21:56:12.000Z
2022-03-22T22:08:12.000Z
import sh
from sh import docker


class DockerWrapper(object):
    """
    This class wraps docker calls to a certain image.
    """

    def __init__(self, docker_image: str):
        """
        :param docker_image: The name of the docker image. It should already exist.
        """
        self.docker_image = docker_image

    def run_command_in_docker_container_and_return_output(self, command: [str], *args, **kwargs):
        """
        Runs a command (with arguments) in a new, throwaway docker container.

        :param command: A list of strings, e.g., ["echo","hello"]
        :param args: Extra positional arguments appended after the command.
        :param kwargs: Keyword arguments forwarded to sh's docker.run call.
        :return: The output.
        """
        # BUG FIX: *args was accepted but silently dropped before; it is now
        # forwarded along with **kwargs.
        docker_command = docker.run(
            "--rm=true",             # remove the container once it exits
            "--cap-add=SYS_PTRACE",  # allow ptrace inside the container
            self.docker_image,
            command,                 # sh flattens list arguments into argv
            *args,
            **kwargs,
        )  # type: sh.RunningCommand
        return str(docker_command)
32.92
100
0.608748
101
823
4.772277
0.524752
0.136929
0.093361
0
0
0
0
0
0
0
0
0
0.28068
823
24
101
34.291667
0.814189
0.353584
0
0
0
0
0.063877
0
0
0
0
0
0
1
0.222222
false
0
0.222222
0
0.666667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
2
0659f2964a91e9c1bdb5016c1e088a5a70fff44b
190
py
Python
constants.py
andodevel/rmbg
652fdd6dd2c506c415575202946fba60de999c1f
[ "MIT" ]
null
null
null
constants.py
andodevel/rmbg
652fdd6dd2c506c415575202946fba60de999c1f
[ "MIT" ]
4
2020-11-13T18:44:13.000Z
2022-02-10T01:43:09.000Z
constants.py
andodevel/rmbg
652fdd6dd2c506c415575202946fba60de999c1f
[ "MIT" ]
null
null
null
from enum import Enum


class TFLogLevel(Enum):
    """TensorFlow log-level values, from most to least verbose."""

    ALL = '0'    # everything is printed
    WARN = '1'   # INFO is not printed
    ERROR = '2'  # neither INFO nor WARN is printed
    OFF = '3'    # nothing is printed


# Device id meaning "CPU only".
# NOTE(review): presumably used as a CUDA_VISIBLE_DEVICES value to hide all
# GPUs — confirm against the callers.
CPU_DEVICE = "-1"
15.833333
44
0.568421
28
190
3.821429
0.678571
0.186916
0
0
0
0
0
0
0
0
0
0.038168
0.310526
190
11
45
17.272727
0.778626
0.263158
0
0
0
0
0.044444
0
0
0
0
0
0
1
0
false
0
0.142857
0
0.857143
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
2
068626cee318347b03c07209e6a4060161fae3ba
203
py
Python
Python/070climbing_stairs.py
Apocrypse/LeetCode
3ada2605ce8c8f6dadebf37a30c9c00a0d1ede39
[ "MIT" ]
4
2020-03-17T03:08:51.000Z
2022-03-14T17:33:28.000Z
Python/070climbing_stairs.py
Apocrypse/LeetCode
3ada2605ce8c8f6dadebf37a30c9c00a0d1ede39
[ "MIT" ]
null
null
null
Python/070climbing_stairs.py
Apocrypse/LeetCode
3ada2605ce8c8f6dadebf37a30c9c00a0d1ede39
[ "MIT" ]
3
2021-04-29T16:51:02.000Z
2022-03-19T17:37:56.000Z
class Solution:
    def climbStairs(self, n):
        """Count the distinct ways to climb n stairs taking 1 or 2 steps.

        :type n: int
        :rtype: int
        """
        # Fibonacci-style rolling pair: after k iterations, `ways` holds the
        # number of ways to reach step k.
        ways, next_ways = 1, 1
        step = 0
        while step < n:
            ways, next_ways = next_ways, ways + next_ways
            step += 1
        return ways
18.454545
29
0.374384
26
203
2.884615
0.615385
0.08
0
0
0
0
0
0
0
0
0
0.019802
0.502463
203
10
30
20.3
0.722772
0.118227
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
false
0
0
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2