hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
6864a9f03cead57ca9d4c44527b87818ef4d789d
| 20
|
py
|
Python
|
example_project/some_modules/third_modules/a19.py
|
Yuriy-Leonov/cython_imports_limit_issue
|
2f9e7c02798fb52185dabfe6ce3811c439ca2839
|
[
"MIT"
] | null | null | null |
example_project/some_modules/third_modules/a19.py
|
Yuriy-Leonov/cython_imports_limit_issue
|
2f9e7c02798fb52185dabfe6ce3811c439ca2839
|
[
"MIT"
] | null | null | null |
example_project/some_modules/third_modules/a19.py
|
Yuriy-Leonov/cython_imports_limit_issue
|
2f9e7c02798fb52185dabfe6ce3811c439ca2839
|
[
"MIT"
] | null | null | null |
class A19:
pass
| 6.666667
| 10
| 0.6
| 3
| 20
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 0.35
| 20
| 2
| 11
| 10
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
6867be37f19fd6cf9263a26e92363954dd208f93
| 1,265
|
py
|
Python
|
apgwrapper.py
|
bodono/apgpy
|
a80dceb4dec51bf54a982447f80398ad1103b6c0
|
[
"BSD-2-Clause-FreeBSD"
] | 14
|
2017-02-07T17:17:27.000Z
|
2022-02-01T16:59:15.000Z
|
apgwrapper.py
|
bodono/apgpy
|
a80dceb4dec51bf54a982447f80398ad1103b6c0
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
apgwrapper.py
|
bodono/apgpy
|
a80dceb4dec51bf54a982447f80398ad1103b6c0
|
[
"BSD-2-Clause-FreeBSD"
] | 11
|
2015-09-22T10:12:10.000Z
|
2020-07-18T09:25:58.000Z
|
import numpy as np
class IWrapper:
def dot(self, other):
raise NotImplementedError("Implement in subclass")
def __add__(self, other):
raise NotImplementedError("Implement in subclass")
def __sub__(self, other):
raise NotImplementedError("Implement in subclass")
def __mul__(self, scalar):
raise NotImplementedError("Implement in subclass")
def copy(self):
raise NotImplementedError("Implement in subclass")
def norm(self):
raise NotImplementedError("Implement in subclass")
@property
def data(self):
return self
__rmul__ = __mul__
class NumpyWrapper(IWrapper):
def __init__(self, nparray):
self._nparray = nparray
def dot(self, other):
return np.inner(self.data, other.data)
def __add__(self, other):
return NumpyWrapper(self.data + other.data)
def __sub__(self, other):
return NumpyWrapper(self.data - other.data)
def __mul__(self, scalar):
return NumpyWrapper(self.data * scalar)
def copy(self):
return NumpyWrapper(np.copy(self.data))
def norm(self):
return np.linalg.norm(self.data)
@property
def data(self):
return self._nparray
__rmul__ = __mul__
| 23.425926
| 58
| 0.654545
| 143
| 1,265
| 5.468531
| 0.20979
| 0.069054
| 0.253197
| 0.268542
| 0.613811
| 0.588235
| 0.331202
| 0.331202
| 0.120205
| 0
| 0
| 0
| 0.250593
| 1,265
| 54
| 59
| 23.425926
| 0.824895
| 0
| 0
| 0.648649
| 0
| 0
| 0.099526
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.405405
| false
| 0
| 0.027027
| 0.216216
| 0.756757
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
68844816d0453d926b20f1fd4a46ee1ed08abd18
| 63
|
py
|
Python
|
tests/pyconverter-test/cases/function_parameters.py
|
jaydeetay/pxt
|
aad1beaf15edc46e1327806367298cbc942dcbc1
|
[
"MIT"
] | 977
|
2019-05-06T23:12:55.000Z
|
2022-03-29T19:11:44.000Z
|
tests/pyconverter-test/cases/function_parameters.py
|
jaydeetay/pxt
|
aad1beaf15edc46e1327806367298cbc942dcbc1
|
[
"MIT"
] | 3,980
|
2019-05-09T20:48:14.000Z
|
2022-03-28T20:33:07.000Z
|
tests/pyconverter-test/cases/function_parameters.py
|
jaydeetay/pxt
|
aad1beaf15edc46e1327806367298cbc942dcbc1
|
[
"MIT"
] | 306
|
2016-04-09T05:28:07.000Z
|
2019-05-02T14:23:29.000Z
|
def on_chat(num):
pass
def on_chat2(num: number):
pass
| 12.6
| 26
| 0.650794
| 11
| 63
| 3.545455
| 0.636364
| 0.25641
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020833
| 0.238095
| 63
| 5
| 27
| 12.6
| 0.791667
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
6896a5c29598dc78d5565410a7bd12a84dfba0fb
| 7,178
|
py
|
Python
|
tests/test_sales.py
|
kelvinrandu/store-manager-api
|
814fb1e1c9c7815a9572a02ef2bcbbaa65a3d546
|
[
"MIT"
] | null | null | null |
tests/test_sales.py
|
kelvinrandu/store-manager-api
|
814fb1e1c9c7815a9572a02ef2bcbbaa65a3d546
|
[
"MIT"
] | 6
|
2018-10-18T01:39:21.000Z
|
2018-11-05T07:15:54.000Z
|
tests/test_sales.py
|
kelvinrandu/store-manager-api
|
814fb1e1c9c7815a9572a02ef2bcbbaa65a3d546
|
[
"MIT"
] | 3
|
2018-10-26T03:39:27.000Z
|
2020-09-25T16:40:44.000Z
|
import unittest
import json
import sys
from psycopg2 import connect, extras
from application.app import create_app
from application.database import conn, create_tables, delete_tables
CREATE_SALE_URL = '/api/v1/sales/'
GET_SINGLE_SALE = '/api/v1/sale/1/'
GET_ALL_SALE = '/api/v1/sales/'
CREATE_PRODUCT_URL = '/api/v1/products/'
class SaleTestCase(unittest.TestCase):
def setUp(self):
'''Initialize app and define test variables'''
self.app = create_app("testing")
self.client = self.app.test_client()
self.app_context = self.app.app_context()
self.app_context.push()
self.register_user = { "email": "john23@gmail.com", "password":"12345678", "username":"johny"}
self.login_admin = { "email": "admin@gmail.com", "password":"12345678" }
self.login_user = { "email": "john23@gmail.com", "password":"12345678" }
self.products = { "name": "name", "quantity": 70, "min_stock":68, "price":2000, "category_id":1 }
self.sales = {"product_id":1,"quantity":1 }
self.empty_product_id = {"product_id":"","quantity":2 }
self.empty_quantity = {"product_id":1,"quantity":"" }
create_tables()
def login(self):
res_login = self.client.post('/api/v1/auth/login/',
data = json.dumps(self.login_admin),
content_type='application/json')
return json.loads(res_login.data.decode())["access_token"]
def attendant_login(self):
res = self.client.post('/api/v1/auth/signup/', data=json.dumps(self.register_user),
headers=dict(Authorization="Bearer " + self.login()),
content_type = 'application/json')
res_login = self.client.post('/api/v1/auth/login/',
data = json.dumps(self.login_user),
content_type='application/json')
return json.loads(res_login.data.decode())["access_token"]
def test_create_sale(self):
'''Test for creating a sale '''
response = self.client.post(CREATE_PRODUCT_URL,
data = json.dumps(self.products),
headers=dict(Authorization="Bearer " + self.login()),
content_type = 'application/json')
resp_data = json.loads(response.data.decode())
self.assertEqual(resp_data['message'], 'product created successfully')
self.assertEqual(response.status_code, 201)
response1 = self.client.post(CREATE_SALE_URL,
data = json.dumps(self.sales),
headers=dict(Authorization="Bearer " + self.attendant_login()),
content_type = 'application/json')
resp_data = json.loads(response1.data.decode())
self.assertEqual(resp_data['message'], 'sale created successfully')
self.assertEqual(response1.status_code, 201)
def test_create_sale_empty_token(self):
'''Test for creating a sale '''
response = self.client.post(CREATE_PRODUCT_URL,
data = json.dumps(self.products),
headers=dict(Authorization="Bearer " + self.login()),
content_type = 'application/json')
resp_data = json.loads(response.data.decode())
self.assertEqual(resp_data['message'], 'product created successfully')
self.assertEqual(response.status_code, 201)
response1 = self.client.post(CREATE_SALE_URL,
data = json.dumps(self.sales),
content_type = 'application/json')
resp_data = json.loads(response1.data.decode())
self.assertEqual(resp_data['msg'], 'Missing Authorization Header')
self.assertEqual(response1.status_code, 401)
def test_create_sale_invalid_token(self):
'''Test for creating a sale '''
response = self.client.post(CREATE_PRODUCT_URL,
data = json.dumps(self.products),
headers=dict(Authorization="Bearer " + self.login()),
content_type = 'application/json')
resp_data = json.loads(response.data.decode())
self.assertEqual(resp_data['message'], 'product created successfully')
self.assertEqual(response.status_code, 201)
response1 = self.client.post(CREATE_SALE_URL,
data = json.dumps(self.sales),
headers=dict(Authorization="Bearer " + self.login()),
content_type = 'application/json')
resp_data = json.loads(response1.data.decode())
self.assertEqual(resp_data['message'], 'unauthorized ')
self.assertEqual(response1.status_code, 401)
def test_create_sale_no_empty_quantity(self):
'''Test for creating a sale '''
response = self.client.post(CREATE_PRODUCT_URL,
data = json.dumps(self.products),
headers=dict(Authorization="Bearer " + self.login()),
content_type = 'application/json')
resp_data = json.loads(response.data.decode())
self.assertEqual(resp_data['message'], 'product created successfully')
self.assertEqual(response.status_code, 201)
response1 = self.client.post(CREATE_SALE_URL,
data = json.dumps(self.empty_quantity),
headers=dict(Authorization="Bearer " + self.attendant_login()),
content_type = 'application/json')
resp_data = json.loads(response1.data.decode())
# self.assertEqual(resp_data['message'], 'quantity cannot be blank')
self.assertEqual(response1.status_code, 400)
def test_create_sale_no_empty_product_id(self):
'''Test for creating a sale '''
response = self.client.post(CREATE_PRODUCT_URL,
data = json.dumps(self.products),
headers=dict(Authorization="Bearer " + self.login()),
content_type = 'application/json')
resp_data = json.loads(response.data.decode())
self.assertEqual(resp_data['message'], 'product created successfully')
self.assertEqual(response.status_code, 201)
response1 = self.client.post(CREATE_SALE_URL,
data = json.dumps(self.empty_product_id),
headers=dict(Authorization="Bearer " + self.attendant_login()),
content_type = 'application/json')
resp_data = json.loads(response1.data.decode())
# self.assertEqual(resp_data['message'], 'quantity cannot be blank')
self.assertEqual(response1.status_code, 400)
delete_tables()
| 52.014493
| 135
| 0.569936
| 733
| 7,178
| 5.405184
| 0.145975
| 0.046441
| 0.045936
| 0.05578
| 0.777385
| 0.760727
| 0.747097
| 0.72741
| 0.72741
| 0.72741
| 0
| 0.019282
| 0.313597
| 7,178
| 138
| 136
| 52.014493
| 0.784859
| 0.042491
| 0
| 0.627273
| 0
| 0
| 0.131998
| 0
| 0
| 0
| 0
| 0
| 0.163636
| 1
| 0.072727
| false
| 0.027273
| 0.054545
| 0
| 0.154545
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
68adc7160244dec7ed97d095a9607d073fd75d3b
| 22
|
py
|
Python
|
Main.py
|
kenzored2201/Python_lesson_01
|
6b3881a8d582d967b21ab3791b5b304e1e9c89ca
|
[
"MIT"
] | null | null | null |
Main.py
|
kenzored2201/Python_lesson_01
|
6b3881a8d582d967b21ab3791b5b304e1e9c89ca
|
[
"MIT"
] | 6
|
2020-06-06T01:55:00.000Z
|
2021-06-10T20:14:08.000Z
|
hello_world.py
|
AbhiK24/profiles-rest-api
|
2c2e060c9dea343b15bfdfffe01acf2ae36ba8f2
|
[
"MIT"
] | null | null | null |
print("HEllo world!")
| 11
| 21
| 0.681818
| 3
| 22
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 22
| 1
| 22
| 22
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
d7e133c468b2cad83ef20959cee3faded8784b9f
| 36,826
|
py
|
Python
|
tests/unit/states/test_crmshmod.py
|
arbulu89/salt-shaptools
|
ae2f08a885e7125d53ab5eba4a2b692e937be6fb
|
[
"Apache-2.0"
] | 2
|
2018-11-26T12:58:30.000Z
|
2019-01-16T17:55:44.000Z
|
tests/unit/states/test_crmshmod.py
|
arbulu89/salt-shaptools
|
ae2f08a885e7125d53ab5eba4a2b692e937be6fb
|
[
"Apache-2.0"
] | 2
|
2018-12-05T11:34:42.000Z
|
2019-01-16T11:43:27.000Z
|
tests/unit/states/test_crmshmod.py
|
arbulu89/salt-shaptools
|
ae2f08a885e7125d53ab5eba4a2b692e937be6fb
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
'''
:codeauthor: Xabier Arbulu Insausti <xarbulu@suse.com>
'''
# Import Python libs
from __future__ import absolute_import, unicode_literals, print_function
import sys
import collections
from salt import exceptions
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import skipIf, TestCase
from tests.support import mock
from tests.support.mock import (
mock_open,
MagicMock,
patch
)
# Import Salt Libs
import salt.states.crmshmod as crmshmod
class CrmshmodTestCase(TestCase, LoaderModuleMockMixin):
'''
Test cases for salt.states.crm
'''
def setup_loader_modules(self):
return {crmshmod: {'__opts__': {'test': False}}}
# 'absent' function tests
def test_absent_absent(self):
'''
Test to check absent when cluster is already absent
'''
ret = {'name': 'localhost',
'changes': {},
'result': True,
'comment': 'Cluster is already not running'}
mock_status = MagicMock(return_value=1)
with patch.dict(crmshmod.__salt__, {'crm.status': mock_status}):
assert crmshmod.cluster_absent('localhost') == ret
mock_status.assert_called_once_with()
def test_absent_test(self):
'''
Test to check absent in test mode
'''
ret = {'name': 'localhost',
'changes': {'name': 'localhost'},
'result': None,
'comment': 'Cluster node {} would be removed'.format('localhost')}
mock_status = MagicMock(return_value=0)
with patch.dict(crmshmod.__salt__, {'crm.status': mock_status}):
with patch.dict(crmshmod.__opts__, {'test': True}):
assert crmshmod.cluster_absent('localhost') == ret
mock_status.assert_called_once_with()
def test_absent(self):
'''
Test to check absent when cluster is running
'''
ret = {'name': 'localhost',
'changes': {'name': 'localhost'},
'result': True,
'comment': 'Cluster node removed'}
mock_status = MagicMock(return_value=0)
mock_remove = MagicMock(return_value=0)
with patch.dict(crmshmod.__salt__, {'crm.status': mock_status,
'crm.cluster_remove': mock_remove}):
assert crmshmod.cluster_absent('localhost') == ret
mock_status.assert_called_once_with()
mock_remove.assert_called_once_with(
host='localhost', force=True, quiet=None)
def test_absent_error(self):
'''
Test to check absent when removal fails
'''
ret = {'name': 'localhost',
'changes': {'name': 'localhost'},
'result': False,
'comment': 'Error removing cluster node'}
mock_status = MagicMock(return_value=0)
mock_remove = MagicMock(return_value=1)
with patch.dict(crmshmod.__salt__, {'crm.status': mock_status,
'crm.cluster_remove': mock_remove}):
assert crmshmod.cluster_absent('localhost') == ret
mock_status.assert_called_once_with()
mock_remove.assert_called_once_with(
host='localhost', force=True, quiet=None)
def test_absent_command_error(self):
'''
Test to check absent when command execution error is raised
'''
ret = {'name': 'localhost',
'changes': {},
'result': False,
'comment': 'cluster command error'}
mock_status = MagicMock(return_value=0)
mock_remove = MagicMock(
side_effect=exceptions.CommandExecutionError('cluster command error'))
with patch.dict(crmshmod.__salt__, {'crm.status': mock_status,
'crm.cluster_remove': mock_remove}):
assert crmshmod.cluster_absent('localhost') == ret
mock_status.assert_called_once_with()
mock_remove.assert_called_once_with(
host='localhost', force=True, quiet=None)
# 'initialized' function tests
def test_initialized_initialized(self):
'''
Test to check initialized when cluster is already initialized
'''
ret = {'name': 'hacluster',
'changes': {},
'result': True,
'comment': 'Cluster is already initialized'}
mock_status = MagicMock(return_value=0)
with patch.dict(crmshmod.__salt__, {'crm.status': mock_status}):
assert crmshmod.cluster_initialized('hacluster') == ret
mock_status.assert_called_once_with()
def test_initialized_test(self):
'''
Test to check initialized in test mode
'''
ret = {'name': 'hacluster',
'changes': {'name': 'hacluster'},
'result': None,
'comment': '{} would be initialized'.format('hacluster')}
mock_status = MagicMock(return_value=1)
with patch.dict(crmshmod.__salt__, {'crm.status': mock_status}):
with patch.dict(crmshmod.__opts__, {'test': True}):
assert crmshmod.cluster_initialized('hacluster') == ret
mock_status.assert_called_once_with()
def test_initialized(self):
'''
Test to check initialized when cluster is not created yet
'''
ret = {'name': 'hacluster',
'changes': {'name': 'hacluster'},
'result': True,
'comment': 'Cluster initialized'}
mock_status = MagicMock(return_value=1)
mock_init = MagicMock(return_value=0)
with patch.dict(crmshmod.__salt__, {'crm.status': mock_status,
'crm.cluster_init': mock_init}):
assert crmshmod.cluster_initialized(
name='hacluster',
watchdog='/dev/watchdog',
interface='eth0',
unicast=False,
admin_ip='192.168.1.50',
sbd=True,
sbd_dev='/dev/sbd',
no_overwrite_sshkey=True,
quiet=False) == ret
mock_status.assert_called_once_with()
mock_init.assert_called_once_with(
name='hacluster',
watchdog='/dev/watchdog',
interface='eth0',
unicast=False,
admin_ip='192.168.1.50',
sbd=True,
sbd_dev='/dev/sbd',
no_overwrite_sshkey=True,
quiet=False)
def test_initialized_error(self):
'''
Test to check initialized when initialization fails
'''
ret = {'name': 'hacluster',
'changes': {'name': 'hacluster'},
'result': False,
'comment': 'Error initialazing cluster'}
mock_status = MagicMock(return_value=1)
mock_init = MagicMock(return_value=1)
with patch.dict(crmshmod.__salt__, {'crm.status': mock_status,
'crm.cluster_init': mock_init}):
assert crmshmod.cluster_initialized(
name='hacluster',
watchdog='/dev/watchdog',
interface='eth0',
unicast=False,
admin_ip='192.168.1.50',
sbd=True,
sbd_dev='/dev/sbd',
no_overwrite_sshkey=True,
quiet=False) == ret
mock_status.assert_called_once_with()
mock_init.assert_called_once_with(
name='hacluster',
watchdog='/dev/watchdog',
interface='eth0',
unicast=False,
admin_ip='192.168.1.50',
sbd=True,
sbd_dev='/dev/sbd',
no_overwrite_sshkey=True,
quiet=False)
def test_initialized_command_error(self):
'''
Test to check initialized when command execution error is raised
'''
ret = {'name': 'hacluster',
'changes': {},
'result': False,
'comment': 'cluster command error'}
mock_status = MagicMock(return_value=1)
mock_init = MagicMock(
side_effect=exceptions.CommandExecutionError('cluster command error'))
with patch.dict(crmshmod.__salt__, {'crm.status': mock_status,
'crm.cluster_init': mock_init}):
assert crmshmod.cluster_initialized(
name='hacluster',
watchdog='/dev/watchdog',
interface='eth0',
unicast=False,
admin_ip='192.168.1.50',
sbd=True,
sbd_dev='/dev/sbd',
no_overwrite_sshkey=False,
quiet=False) == ret
mock_status.assert_called_once_with()
mock_init.assert_called_once_with(
name='hacluster',
watchdog='/dev/watchdog',
interface='eth0',
unicast=False,
admin_ip='192.168.1.50',
sbd=True,
sbd_dev='/dev/sbd',
no_overwrite_sshkey=False,
quiet=False)
# 'joined' function tests
def test_joined_joined(self):
'''
Test to check joined when node is already joined to cluster
'''
ret = {'name': 'master',
'changes': {},
'result': True,
'comment': 'Node is already joined to a cluster'}
mock_status = MagicMock(return_value=0)
with patch.dict(crmshmod.__salt__, {'crm.status': mock_status}):
assert crmshmod.cluster_joined('master') == ret
mock_status.assert_called_once_with()
def test_joined_test(self):
'''
Test to check joined in test mode
'''
ret = {'name': 'master',
'changes': {'name': 'master'},
'result': None,
'comment': 'Node would be joined to {}'.format('master')}
mock_status = MagicMock(return_value=1)
with patch.dict(crmshmod.__salt__, {'crm.status': mock_status}):
with patch.dict(crmshmod.__opts__, {'test': True}):
assert crmshmod.cluster_joined('master') == ret
mock_status.assert_called_once_with()
def test_joined(self):
'''
Test to check joined when cluster is not joined yet
'''
ret = {'name': 'master',
'changes': {'name': 'master'},
'result': True,
'comment': 'Node joined to the cluster'}
mock_status = MagicMock(return_value=1)
mock_join = MagicMock(return_value=0)
with patch.dict(crmshmod.__salt__, {'crm.status': mock_status,
'crm.cluster_join': mock_join}):
assert crmshmod.cluster_joined(
name='master',
watchdog='/dev/watchdog',
interface='eth0',
quiet=False) == ret
mock_status.assert_called_once_with()
mock_join.assert_called_once_with(
host='master',
watchdog='/dev/watchdog',
interface='eth0',
quiet=False)
def test_joined_error(self):
'''
Test to check joined when joining fails
'''
ret = {'name': 'master',
'changes': {'name': 'master'},
'result': False,
'comment': 'Error joining to the cluster'}
mock_status = MagicMock(return_value=1)
mock_join = MagicMock(return_value=1)
with patch.dict(crmshmod.__salt__, {'crm.status': mock_status,
'crm.cluster_join': mock_join}):
assert crmshmod.cluster_joined(
name='master',
watchdog='/dev/watchdog',
interface='eth0',
quiet=False) == ret
mock_status.assert_called_once_with()
mock_join.assert_called_once_with(
host='master',
watchdog='/dev/watchdog',
interface='eth0',
quiet=False)
def test_joined_command_error(self):
'''
Test to check joined when command execution error is raised
'''
ret = {'name': 'master',
'changes': {},
'result': False,
'comment': 'cluster command error'}
mock_status = MagicMock(return_value=1)
mock_join = MagicMock(
side_effect=exceptions.CommandExecutionError('cluster command error'))
with patch.dict(crmshmod.__salt__, {'crm.status': mock_status,
'crm.cluster_join': mock_join}):
assert crmshmod.cluster_joined(
name='master',
watchdog='/dev/watchdog',
interface='eth0',
quiet=False) == ret
mock_status.assert_called_once_with()
mock_join.assert_called_once_with(
host='master',
watchdog='/dev/watchdog',
interface='eth0',
quiet=False)
# 'configured' function tests
def test_configured_test(self):
'''
Test to check configured in test mode
'''
ret = {'name': 'update',
'changes': {'method': 'update', 'url': 'file.config'},
'result': None,
'comment': 'Cluster would be configured with method {} and file {}'.format(
'update', 'file.config')}
with patch.dict(crmshmod.__opts__, {'test': True}):
assert crmshmod.cluster_configured('update', 'file.config') == ret
def test_configured_not_cluster(self):
'''
Test to check configured when the cluster is not initialized
'''
ret = {'name': 'update',
'changes': {},
'result': False,
'comment': 'Cluster is not created yet. Run cluster_initialized before'}
mock_status = MagicMock(return_value=1)
with patch.dict(crmshmod.__salt__, {'crm.status': mock_status}):
assert crmshmod.cluster_configured('update', 'file.config') == ret
mock_status.assert_called_once_with()
def test_configured(self):
'''
Test to check configured when configuration is applied properly
'''
ret = {'name': 'update',
'changes': {'method': 'update', 'url': 'file.config'},
'result': True,
'comment': 'Cluster properly configured'}
mock_status = MagicMock(return_value=0)
mock_configured = MagicMock(return_value=0)
with patch.dict(crmshmod.__salt__, {'crm.status': mock_status,
'crm.configure_load': mock_configured}):
assert crmshmod.cluster_configured(
name='update',
url='file.config',
is_xml=False) == ret
mock_status.assert_called_once_with()
mock_configured.assert_called_once_with(
method='update',
url='file.config',
is_xml=False,
force=False)
def test_configured_error(self):
'''
Test to check configured when configuration fails
'''
ret = {'name': 'update',
'changes': {},
'result': False,
'comment': 'Error configuring the cluster with method {} and file {}'.format(
'update', 'file.config')}
mock_status = MagicMock(return_value=0)
mock_configured = MagicMock(return_value=1)
with patch.dict(crmshmod.__salt__, {'crm.status': mock_status,
'crm.configure_load': mock_configured}):
assert crmshmod.cluster_configured(
name='update',
url='file.config',
is_xml=False,
force=True) == ret
mock_status.assert_called_once_with()
mock_configured.assert_called_once_with(
method='update',
url='file.config',
is_xml=False,
force=True)
def test_configured_command_error(self):
'''
Test to check configured when command execution error is raised
'''
ret = {'name': 'update',
'changes': {},
'result': False,
'comment': 'cluster command error'}
mock_status = MagicMock(return_value=0)
mock_configured = MagicMock(
side_effect=exceptions.CommandExecutionError('cluster command error'))
with patch.dict(crmshmod.__salt__, {'crm.status': mock_status,
'crm.configure_load': mock_configured}):
assert crmshmod.cluster_configured(
name='update',
url='file.config',
is_xml=False) == ret
mock_status.assert_called_once_with()
mock_configured.assert_called_once_with(
method='update',
url='file.config',
is_xml=False,
force=False)
def test_convert2dict(self):
corofile = """
# Please read the corosync.conf.5 manual page
totem {
version: 2
max_messages: 20
interface {
ringnumber: 0
}
transport: udpu
}
logging {
timestamp: on
logger_subsys {
debug: off
}
}
quorum {
expected_votes: 1
two_node: 0
}"""
corodict, _ = crmshmod._convert2dict(corofile.splitlines())
assert corodict == {
'totem': {
'version': '2',
'max_messages': '20',
'interface': {
'ringnumber': '0'
},
'transport': 'udpu'
},
'logging': {
'timestamp': 'on',
'logger_subsys': {
'debug': 'off'
}
},
'quorum': {
'expected_votes': '1',
'two_node': '0'
}
}
def test_merge_dicts1(self):
main_dict = {
'a': {
'b': 1,
'c': 2
},
'd': 3
}
changed_dict = {
'a': {
'c': 4
},
'd': 5
}
merged_dict, applied_changes = crmshmod._mergedicts(
main_dict, changed_dict, {}, '')
assert merged_dict == {
'a': {
'b': 1,
'c': 4
},
'd': 5
}
assert applied_changes == {
'.a.c': 4,
'.d': 5
}
def test_merge_dicts2(self):
main_dict = {
'a': {
'b': {
'f': 7
},
'c': 2
},
'd': 3
}
changed_dict = {
'a': {
'b': {
'f': 8
},
},
'd': 5
}
merged_dict, applied_changes = crmshmod._mergedicts(
main_dict, changed_dict, {}, '')
assert merged_dict == {
'a': {
'b': {
'f': 8
},
'c': 2
},
'd': 5
}
assert applied_changes == {
'.d': 5,
'.a.b.f': 8
}
def test_merge_dicts3(self):
main_dict = {
'a': {
'b': 1,
'c': 2
},
'd': 3
}
changed_dict = {
'e': {
'c': 4
},
'a': {
'b': 3
},
'd': 5
}
merged_dict, applied_changes = crmshmod._mergedicts(
main_dict, changed_dict, {}, '')
assert merged_dict == {
'a': {
'b': 3,
'c': 2
},
'e': {
'c': 4
},
'd': 5
}
assert applied_changes == {
'.d': 5,
'.a.b': 3,
'.e': {'c': 4}
}
def test_convert2corosync(self):
main_dict = {
'a': {
'b': {
'f': 7
},
'c': 2
},
'd': 3
}
output = crmshmod._convert2corosync(main_dict, '')
# Py2 and py3 have different way of ordering the `items` method
# For the functionality this does not really affect
if sys.version_info[0] == 2:
assert output == "a {\n\tc: 2\n\tb {\n\t\tf: 7\n\t}\n}\nd: 3\n"
else:
assert output == "a {\n\tb {\n\t\tf: 7\n\t}\n\tc: 2\n}\nd: 3\n"
@mock.patch('salt.states.crmshmod._convert2dict')
@mock.patch('salt.states.crmshmod._mergedicts')
def test_corosync_updated_already(self, mock_mergedicts, mock_convert2dict):
'''
Test to check corosync_updated when configuration is already applied
'''
ret = {'name': '/etc/corosync/corosync.conf',
'changes': {},
'result': True,
'comment': 'Corosync already has the required configuration'}
mock_convert2dict.return_value = ({'data': 1}, {})
mock_mergedicts.return_value = ({}, {})
file_content = "my corosync file content\nmy corosync file 2nd line content"
with patch("salt.utils.files.fopen", mock_open(read_data=file_content)):
assert crmshmod.corosync_updated(
name='/etc/corosync/corosync.conf',
data={'my_data': 1}) == ret
mock_convert2dict.assert_called_once_with(
['my corosync file content', 'my corosync file 2nd line content']
)
mock_mergedicts.assert_called_once_with(
{'data': 1}, {'my_data': 1}, {})
@mock.patch('salt.states.crmshmod._convert2dict')
@mock.patch('salt.states.crmshmod._mergedicts')
def test_corosync_updated_test(self, mock_mergedicts, mock_convert2dict):
'''
Test to check corosync_updated in test mode
'''
ret = {'name': '/etc/corosync/corosync.conf',
'changes': {'data': 1},
'result': None,
'comment': 'Corosync configuration would be update'}
mock_convert2dict.return_value = ({}, {})
mock_mergedicts.return_value = ({}, {'data': 1})
file_content = "my corosync file content\nmy corosync file 2nd line content"
with patch.dict(crmshmod.__opts__, {'test': True}):
with patch("salt.utils.files.fopen", mock_open(read_data=file_content)):
assert crmshmod.corosync_updated(
name='/etc/corosync/corosync.conf',
data={'my_data': 1}) == ret
mock_convert2dict.assert_called_once_with(
['my corosync file content', 'my corosync file 2nd line content']
)
mock_mergedicts.assert_called_once_with(
{}, {'my_data': 1}, {})
@mock.patch('salt.states.crmshmod._convert2corosync')
@mock.patch('salt.states.crmshmod._convert2dict')
@mock.patch('salt.states.crmshmod._mergedicts')
def test_corosync_updated(self, mock_mergedicts, mock_convert2dict, mock_convert2corosync):
'''
Test to check corosync_updated when configuration is applied
'''
ret = {'name': '/etc/corosync/corosync.conf',
'changes': {'change1': 1, 'change2': 2},
'result': True,
'comment': 'Corosync configuration file updated'}
mock_copy = MagicMock()
mock_write = MagicMock()
mock_convert2dict.return_value = ({'data': 1}, {})
mock_mergedicts.return_value = ({'updated': 2}, {'change1': 1, 'change2': 2})
mock_convert2corosync.return_value = 'new content'
file_content = "my corosync file content\nmy corosync file 2nd line content"
with patch.dict(crmshmod.__salt__, {'file.copy': mock_copy,
'file.write': mock_write}):
with patch("salt.utils.files.fopen", mock_open(read_data=file_content)):
assert crmshmod.corosync_updated(
name='/etc/corosync/corosync.conf',
data={'my_data': 1}) == ret
mock_convert2dict.assert_called_once_with(
['my corosync file content', 'my corosync file 2nd line content']
)
mock_mergedicts.assert_called_once_with(
{'data': 1}, {'my_data': 1}, {})
mock_convert2corosync.assert_called_once_with({'updated': 2})
mock_copy.assert_called_once_with(
'/etc/corosync/corosync.conf', '/etc/corosync/corosync.conf.backup')
mock_write.assert_called_once_with(
'/etc/corosync/corosync.conf', 'new content')
# 'cluster_properties_present' function tests
def test_properties_present_no_cluster(self):
'''
Test to check properties_present when the cluster is not created
'''
ret = {'name': 'name',
'changes': {},
'result': False,
'comment': 'Cluster is not created yet. Run cluster_initialized before'}
mock_status = MagicMock(return_value=1)
with patch.dict(crmshmod.__salt__, {'crm.status': mock_status}):
assert crmshmod.cluster_properties_present('name', {'data': 'value'}) == ret
mock_status.assert_called_once_with()
def test_properties_present_test(self):
'''
Test to check properties_present in test mode
'''
ret = {'name': 'name',
'changes': {'data': 'value'},
'result': None,
'comment': 'Cluster properties would be configured'}
mock_status = MagicMock(return_value=0)
with patch.dict(crmshmod.__salt__, {'crm.status': mock_status}):
with patch.dict(crmshmod.__opts__, {'test': True}):
assert crmshmod.cluster_properties_present('name', {'data': 'value'}) == ret
mock_status.assert_called_once_with()
def test_properties_present(self):
'''
Test to check properties_present
'''
ret = {'name': 'name',
'changes': {'data1': 'value1', 'data2': 'value2'},
'result': True,
'comment': 'Cluster properties configured'}
mock_status = MagicMock(return_value=0)
mock_configure_get_property = MagicMock()
mock_configure_property = MagicMock()
with patch.dict(crmshmod.__salt__, {'crm.status': mock_status,
'crm.configure_get_property': mock_configure_get_property,
'crm.configure_property': mock_configure_property}):
assert crmshmod.cluster_properties_present(
name='name',
properties={'data1': 'value1', 'data2': 'value2'}) == ret
mock_status.assert_called_once_with()
mock_configure_get_property.assert_has_calls([
mock.call(option='data1'),
mock.call(option='data2')
])
mock_configure_property.assert_has_calls([
mock.call(option='data1', value='value1'),
mock.call(option='data2', value='value2')
])
def test_properties_present_error(self):
'''
Test to check properties_present with an error
'''
ret = {'name': 'name',
'changes': {'data3': 'value3'},
'result': False,
'comment': 'Error configuring the properties data1, data2'}
mock_status = MagicMock(return_value=0)
mock_configure_get_property = MagicMock(side_effect=[
exceptions.CommandExecutionError('err1'),
exceptions.CommandExecutionError('err2'),
"value3"
])
mock_configure_property = MagicMock()
# We need to create the dictionary this way, otherwise the items output is different in py2 and py3
data = collections.OrderedDict()
data['data1'] = 'value1'
data['data2'] = 'value2'
data['data3'] = 'value3'
with patch.dict(crmshmod.__salt__, {'crm.status': mock_status,
'crm.configure_get_property': mock_configure_get_property,
'crm.configure_property': mock_configure_property}):
assert crmshmod.cluster_properties_present(
name='name',
properties=data) == ret
mock_status.assert_called_once_with()
mock_configure_get_property.assert_has_calls([
mock.call(option='data1'),
mock.call(option='data2'),
mock.call(option='data3')
])
mock_configure_property.assert_has_calls([
mock.call(option='data3', value='value3')
])
# 'cluster_rsc_defaults_present' function tests
def test_rsc_defaults_present_no_cluster(self):
'''
Test to check rsc_defaults_present when the cluster is not created
'''
ret = {'name': 'name',
'changes': {},
'result': False,
'comment': 'Cluster is not created yet. Run cluster_initialized before'}
mock_status = MagicMock(return_value=1)
with patch.dict(crmshmod.__salt__, {'crm.status': mock_status}):
assert crmshmod.cluster_rsc_defaults_present('name', {'data': 'value'}) == ret
mock_status.assert_called_once_with()
def test_rsc_defaults_present_test(self):
'''
Test to check rsc_defaults_present in test mode
'''
ret = {'name': 'name',
'changes': {'data': 'value'},
'result': None,
'comment': 'Cluster rsc_defaults would be configured'}
mock_status = MagicMock(return_value=0)
with patch.dict(crmshmod.__salt__, {'crm.status': mock_status}):
with patch.dict(crmshmod.__opts__, {'test': True}):
assert crmshmod.cluster_rsc_defaults_present('name', {'data': 'value'}) == ret
mock_status.assert_called_once_with()
def test_rsc_defaults_present(self):
'''
Test to check rsc_defaults_present
'''
ret = {'name': 'name',
'changes': {'data1': 'value1', 'data2': 'value2'},
'result': True,
'comment': 'Cluster rsc_defaults configured'}
mock_status = MagicMock(return_value=0)
mock_configure_rsc_defaults = MagicMock()
with patch.dict(crmshmod.__salt__, {'crm.status': mock_status,
'crm.configure_rsc_defaults': mock_configure_rsc_defaults}):
assert crmshmod.cluster_rsc_defaults_present(
name='name',
rsc_defaults={'data1': 'value1', 'data2': 'value2'}) == ret
mock_status.assert_called_once_with()
mock_configure_rsc_defaults.assert_has_calls([
mock.call(option='data1', value='value1'),
mock.call(option='data2', value='value2')
])
# 'cluster_op_defaults_present' function tests
def test_op_defaults_present_no_cluster(self):
'''
Test to check op_defaults_present when the cluster is not created
'''
ret = {'name': 'name',
'changes': {},
'result': False,
'comment': 'Cluster is not created yet. Run cluster_initialized before'}
mock_status = MagicMock(return_value=1)
with patch.dict(crmshmod.__salt__, {'crm.status': mock_status}):
assert crmshmod.cluster_op_defaults_present('name', {'data': 'value'}) == ret
mock_status.assert_called_once_with()
def test_op_defaults_present_test(self):
'''
Test to check op_defaults_present in test mode
'''
ret = {'name': 'name',
'changes': {'data': 'value'},
'result': None,
'comment': 'Cluster op_defaults would be configured'}
mock_status = MagicMock(return_value=0)
with patch.dict(crmshmod.__salt__, {'crm.status': mock_status}):
with patch.dict(crmshmod.__opts__, {'test': True}):
assert crmshmod.cluster_op_defaults_present('name', {'data': 'value'}) == ret
mock_status.assert_called_once_with()
def test_op_defaults_present(self):
'''
Test to check op_defaults_present
'''
ret = {'name': 'name',
'changes': {'data1': 'value1', 'data2': 'value2'},
'result': True,
'comment': 'Cluster op_defaults configured'}
mock_status = MagicMock(return_value=0)
mock_configure_op_defaults = MagicMock()
with patch.dict(crmshmod.__salt__, {'crm.status': mock_status,
'crm.configure_op_defaults': mock_configure_op_defaults}):
assert crmshmod.cluster_op_defaults_present(
name='name',
op_defaults={'data1': 'value1', 'data2': 'value2'}) == ret
mock_status.assert_called_once_with()
mock_configure_op_defaults.assert_has_calls([
mock.call(option='data1', value='value1'),
mock.call(option='data2', value='value2')
])
def test_cloud_grains_present_test(self):
'''
Test to check cloud_grains_present in test mode
'''
ret = {'name': 'name',
'changes': {},
'result': None,
'comment': 'Cloud grains would be set'}
with patch.dict(crmshmod.__opts__, {'test': True}):
assert crmshmod.cloud_grains_present('name') == ret
def test_cloud_grains_present(self):
'''
Test to check cloud_grains_present
'''
ret = {'name': 'name',
'changes': {'cloud_provider': 'mycloud'},
'result': True,
'comment': 'Cloud grains set'}
mock_detect_cloud = MagicMock(return_value='mycloud')
mock_set_grains = MagicMock()
with patch.dict(crmshmod.__salt__, {'crm.detect_cloud': mock_detect_cloud,
'grains.set': mock_set_grains}):
assert crmshmod.cloud_grains_present(name='name') == ret
mock_detect_cloud.assert_called_once_with()
mock_set_grains.assert_called_once_with('cloud_provider', 'mycloud')
def test_cloud_grains_present_gcp(self):
'''
Test to check cloud_grains_present for gcp
'''
ret = {'name': 'name',
'changes': {'cloud_provider': 'google-cloud-platform',
'gcp_instance_id': 'm_id',
'gcp_instance_name': 'm_name'},
'result': True,
'comment': 'Cloud grains set'}
mock_detect_cloud = MagicMock(return_value='google-cloud-platform')
mock_set_grains = MagicMock()
mock_http_query = MagicMock(side_effect=[{'body': 'm_id'}, {'body': 'm_name'}])
with patch.dict(crmshmod.__salt__, {'crm.detect_cloud': mock_detect_cloud,
'grains.set': mock_set_grains,
'http.query': mock_http_query}):
assert crmshmod.cloud_grains_present(name='name') == ret
mock_detect_cloud.assert_called_once_with()
mock_http_query.assert_has_calls([
mock.call(
url='http://metadata.google.internal/computeMetadata/v1/instance/id',
header_dict={"Metadata-Flavor": "Google"}),
mock.call(
url='http://metadata.google.internal/computeMetadata/v1/instance/name',
header_dict={"Metadata-Flavor": "Google"})
])
mock_set_grains.assert_has_calls([
mock.call('cloud_provider', 'google-cloud-platform'),
mock.call('gcp_instance_id', 'm_id'),
mock.call('gcp_instance_name', 'm_name')
])
| 35.649564
| 107
| 0.532531
| 3,618
| 36,826
| 5.150912
| 0.077114
| 0.046684
| 0.045503
| 0.056879
| 0.84208
| 0.803177
| 0.778118
| 0.712707
| 0.674394
| 0.650944
| 0
| 0.011451
| 0.350242
| 36,826
| 1,032
| 108
| 35.684109
| 0.767385
| 0.066176
| 0
| 0.672414
| 0
| 0.002653
| 0.191766
| 0.022733
| 0
| 0
| 0
| 0
| 0.140584
| 1
| 0.055703
| false
| 0
| 0.011936
| 0.001326
| 0.070292
| 0.001326
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
d7f96484767e815125fbb5b2dc4dbeab1101f157
| 63
|
py
|
Python
|
pyradamsa/__init__.py
|
p1-olm/pyradamsa
|
522660d02a0e1fe93475fc04de60a11ae156ef8f
|
[
"MIT"
] | 14
|
2020-07-16T11:25:01.000Z
|
2022-01-26T14:01:32.000Z
|
pyradamsa/__init__.py
|
p1-olm/pyradamsa
|
522660d02a0e1fe93475fc04de60a11ae156ef8f
|
[
"MIT"
] | 2
|
2020-07-09T23:14:54.000Z
|
2021-06-13T12:55:50.000Z
|
pyradamsa/__init__.py
|
p1-olm/pyradamsa
|
522660d02a0e1fe93475fc04de60a11ae156ef8f
|
[
"MIT"
] | 2
|
2020-11-16T11:30:42.000Z
|
2021-07-09T10:34:43.000Z
|
from .pyradamsa import Radamsa
from .version import __version__
| 31.5
| 32
| 0.857143
| 8
| 63
| 6.25
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 63
| 2
| 32
| 31.5
| 0.892857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
cc0d96a678d37c11f35724db9e9be252a5bdf1d8
| 393
|
py
|
Python
|
patent/__init__.py
|
56kyle/patent
|
4c53f11906311ae19aa144b2b707d161b6d79274
|
[
"MIT"
] | 1
|
2021-11-25T15:10:11.000Z
|
2021-11-25T15:10:11.000Z
|
patent/__init__.py
|
56kyle/patent
|
4c53f11906311ae19aa144b2b707d161b6d79274
|
[
"MIT"
] | null | null | null |
patent/__init__.py
|
56kyle/patent
|
4c53f11906311ae19aa144b2b707d161b6d79274
|
[
"MIT"
] | null | null | null |
from .patent import Patent
import requests
def get(number, language='en'):
if is_patent(number, language=language):
return Patent(number)
raise NotAPatent('That was not a google patent')
def is_patent(number, language='en'):
return requests.get('https://patents.google.com/patent/' + number + '/' + language).status_code == 200
class NotAPatent(Exception):
pass
| 21.833333
| 106
| 0.699746
| 51
| 393
| 5.333333
| 0.54902
| 0.205882
| 0.220588
| 0.161765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009231
| 0.173028
| 393
| 17
| 107
| 23.117647
| 0.827692
| 0
| 0
| 0
| 0
| 0
| 0.170918
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0.1
| 0.2
| 0.1
| 0.7
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
cc1e2c54e1a3470956375404f8dbbd0e9b1618db
| 162
|
py
|
Python
|
analysis/test_utils/valid_workflow/input_handler.py
|
qbrc-cnap/cnap
|
624683e91a64c3b4934b578c59db850242d2f94c
|
[
"MIT"
] | 1
|
2021-07-08T14:06:04.000Z
|
2021-07-08T14:06:04.000Z
|
analysis/test_utils/valid_workflow/input_handler.py
|
qbrc-cnap/cnap
|
624683e91a64c3b4934b578c59db850242d2f94c
|
[
"MIT"
] | 12
|
2020-02-12T00:10:53.000Z
|
2021-06-10T21:24:45.000Z
|
analysis/test_utils/valid_workflow/input_handler.py
|
qbrc-cnap/cnap
|
624683e91a64c3b4934b578c59db850242d2f94c
|
[
"MIT"
] | null | null | null |
def map_inputs(user, all_data, data_name, id_list):
text_input = all_data[data_name]
capitalized = text_input.upper()
return {id_list[0]:capitalized}
| 32.4
| 51
| 0.734568
| 25
| 162
| 4.4
| 0.6
| 0.127273
| 0.2
| 0.272727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007299
| 0.154321
| 162
| 4
| 52
| 40.5
| 0.79562
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
cc2d5c120c745a2d86f2f74209075a0cf1aabf5d
| 42
|
py
|
Python
|
tests/__init__.py
|
paw-lu/dressup
|
d6b7971c1d1dd2e365974dda62e06eb5c65b85d2
|
[
"MIT"
] | 15
|
2020-05-23T20:47:47.000Z
|
2022-01-02T18:57:47.000Z
|
tests/__init__.py
|
paw-lu/dressup
|
d6b7971c1d1dd2e365974dda62e06eb5c65b85d2
|
[
"MIT"
] | 154
|
2020-05-23T03:19:15.000Z
|
2021-09-10T03:21:21.000Z
|
tests/__init__.py
|
pscosta5/dressup
|
d6b7971c1d1dd2e365974dda62e06eb5c65b85d2
|
[
"MIT"
] | 1
|
2021-04-13T16:11:13.000Z
|
2021-04-13T16:11:13.000Z
|
"""Test suite for the dressup package."""
| 21
| 41
| 0.690476
| 6
| 42
| 4.833333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 42
| 1
| 42
| 42
| 0.805556
| 0.833333
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0be9804474c54fb67e7e3d8de8bea4b8885ba911
| 271
|
py
|
Python
|
tests/test_simple.py
|
afulsom/afulsom.github.io
|
5191ce8a015d162878abc2a30f018101cd304f2c
|
[
"MIT"
] | null | null | null |
tests/test_simple.py
|
afulsom/afulsom.github.io
|
5191ce8a015d162878abc2a30f018101cd304f2c
|
[
"MIT"
] | null | null | null |
tests/test_simple.py
|
afulsom/afulsom.github.io
|
5191ce8a015d162878abc2a30f018101cd304f2c
|
[
"MIT"
] | null | null | null |
"""Do an integration test. Only use simple html files."""
import subprocess
def test_valid():
assert subprocess.call(['html5validator', '--root=tests/valid/']) == 0
def test_invalid():
assert subprocess.call(['html5validator', '--root=tests/invalid/']) == 1
| 22.583333
| 76
| 0.682657
| 33
| 271
| 5.545455
| 0.636364
| 0.076503
| 0.218579
| 0.371585
| 0.469945
| 0.469945
| 0
| 0
| 0
| 0
| 0
| 0.017167
| 0.140221
| 271
| 11
| 77
| 24.636364
| 0.76824
| 0.188192
| 0
| 0
| 0
| 0
| 0.317757
| 0.098131
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0.4
| true
| 0
| 0.2
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
0bea92c455d57d07b31440fcea476d811be95c03
| 6,759
|
py
|
Python
|
atari_model.py
|
p0werHu/CS5446_project
|
d3f1f04ff923877f0d0d873be39019045a350056
|
[
"MIT"
] | null | null | null |
atari_model.py
|
p0werHu/CS5446_project
|
d3f1f04ff923877f0d0d873be39019045a350056
|
[
"MIT"
] | null | null | null |
atari_model.py
|
p0werHu/CS5446_project
|
d3f1f04ff923877f0d0d873be39019045a350056
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
import torch.nn.functional as F
from config import gamma
class QNet(nn.Module):
def __init__(self, num_inputs, num_outputs):
super(QNet, self).__init__()
self.num_inputs = num_inputs
self.num_outputs = num_outputs
self.conv1 = nn.Conv2d(4, 32, 8, 4)
self.conv2 = nn.Conv2d(32, 64, 4, 2)
self.conv3 = nn.Conv2d(64, 3, 3, 1)
self.fc1 = nn.Linear(147, 256)
self.fc2 = nn.Linear(256, num_outputs)
for m in self.modules():
if isinstance(m, nn.Linear):
nn.init.xavier_uniform(m.weight)
def forward(self, x):
x = torch.relu(self.conv1(x))
x = torch.relu(self.conv2(x))
x = torch.relu(self.conv3(x))
x = torch.flatten(x, start_dim=1)
x = torch.relu(self.fc1(x))
qvalue = self.fc2(x)
return qvalue
@classmethod
def train_model(cls, online_net, target_net, optimizer, batch, device):
states = torch.cat(batch.state, dim=0)
next_states = torch.cat(batch.next_state, dim=0)
actions = torch.Tensor(batch.action).to(device).float()
rewards = torch.Tensor(batch.reward).to(device)
masks = torch.Tensor(batch.mask).to(device)
pred = online_net(states)
next_pred = target_net(next_states)
pred = torch.sum(pred.mul(actions), dim=1)
target = rewards + masks * gamma * next_pred.max(1)[0]
loss = F.mse_loss(pred, target.detach())
optimizer.zero_grad()
loss.backward()
optimizer.step()
return loss
def get_action(self, input):
qvalue = self.forward(input)
_, action = torch.max(qvalue, 1)
return action.cpu().numpy()[0]
class DoubleDQNet(nn.Module):
def __init__(self, num_inputs, num_outputs):
super(DoubleDQNet, self).__init__()
self.num_inputs = num_inputs
self.num_outputs = num_outputs
self.conv1 = nn.Conv2d(4, 32, 8, 4)
self.conv2 = nn.Conv2d(32, 64, 4, 2)
self.conv3 = nn.Conv2d(64, 3, 3, 1)
self.fc1 = nn.Linear(147, 512)
self.fc2 = nn.Linear(512, num_outputs)
for m in self.modules():
if isinstance(m, nn.Linear):
nn.init.xavier_uniform(m.weight)
def forward(self, x):
x = torch.relu(self.conv1(x))
x = torch.relu(self.conv2(x))
x = torch.relu(self.conv3(x))
x = torch.flatten(x, start_dim=1)
x = torch.relu(self.fc1(x))
qvalue = self.fc2(x)
return qvalue
@classmethod
def train_model(cls, online_net, target_net, optimizer, batch, device):
states = torch.cat(batch.state, dim=0)
next_states = torch.cat(batch.next_state, dim=0)
actions = torch.Tensor(batch.action).to(device).float()
rewards = torch.Tensor(batch.reward).to(device)
masks = torch.Tensor(batch.mask).to(device)
pred = online_net(states)
_, action_from_online_net = online_net(next_states).max(1)
next_pred = target_net(next_states)
pred = torch.sum(pred.mul(actions), dim=1)
target = rewards + masks * gamma * next_pred.gather(1, action_from_online_net.unsqueeze(1)).squeeze(1)
loss = F.mse_loss(pred, target.detach())
optimizer.zero_grad()
loss.backward()
optimizer.step()
return loss
def get_action(self, input):
qvalue = self.forward(input)
_, action = torch.max(qvalue, 1)
return action.cpu().numpy()[0]
class DuelDQNet(nn.Module):
def __init__(self, num_inputs, num_outputs):
super(DuelDQNet, self).__init__()
self.num_inputs = num_inputs
self.num_outputs = num_outputs
self.fc = nn.Linear(num_inputs, 128)
self.fc_adv = nn.Linear(128, num_outputs)
self.fc_val = nn.Linear(128, 1)
for m in self.modules():
if isinstance(m, nn.Linear):
nn.init.xavier_uniform(m.weight)
def forward(self, x):
x = F.relu(self.fc(x))
adv = self.fc_adv(x)
adv = adv.view(-1, self.num_outputs)
val = self.fc_val(x)
val = val.view(-1, 1)
qvalue = val + (adv - adv.mean(dim=1, keepdim=True))
return qvalue
@classmethod
def train_model(cls, online_net, target_net, optimizer, batch):
states = torch.stack(batch.state)
next_states = torch.stack(batch.next_state)
actions = torch.Tensor(batch.action).float()
rewards = torch.Tensor(batch.reward)
masks = torch.Tensor(batch.mask)
pred = online_net(states).squeeze(1)
next_pred = target_net(next_states).squeeze(1)
pred = torch.sum(pred.mul(actions), dim=1)
target = rewards + masks * gamma * next_pred.max(1)[0]
loss = F.mse_loss(pred, target.detach())
optimizer.zero_grad()
loss.backward()
optimizer.step()
return loss
def get_action(self, input):
qvalue = self.forward(input)
_, action = torch.max(qvalue, 1)
return action.numpy()[0]
class PerNet(nn.Module):
def __init__(self, num_inputs, num_outputs):
super(PerNet, self).__init__()
self.num_inputs = num_inputs
self.num_outputs = num_outputs
self.fc1 = nn.Linear(num_inputs, 128)
self.fc2 = nn.Linear(128, num_outputs)
for m in self.modules():
if isinstance(m, nn.Linear):
nn.init.xavier_uniform(m.weight)
def forward(self, x):
x = F.relu(self.fc1(x))
qvalue = self.fc2(x)
return qvalue
@classmethod
def get_td_error(cls, online_net, target_net, state, next_state, action, reward, mask):
state = torch.stack(state)
next_state = torch.stack(next_state)
action = torch.Tensor(action)
reward = torch.Tensor(reward)
mask = torch.Tensor(mask)
pred = online_net(state).squeeze(1)
next_pred = target_net(next_state).squeeze(1)
pred = torch.sum(pred.mul(action), dim=1)
target = reward + mask * gamma * next_pred.max(1)[0]
td_error = pred - target.detach()
return td_error
@classmethod
def train_model(cls, online_net, target_net, optimizer, batch, weights):
td_error = cls.get_td_error(online_net, target_net, batch.state, batch.next_state, batch.action, batch.reward, batch.mask)
loss = pow(td_error, 2) * weights
loss = loss.mean()
optimizer.zero_grad()
loss.backward()
optimizer.step()
return loss
def get_action(self, input):
qvalue = self.forward(input)
_, action = torch.max(qvalue, 1)
return action.numpy()[0]
| 30.722727
| 130
| 0.603344
| 923
| 6,759
| 4.254605
| 0.12026
| 0.04329
| 0.036669
| 0.034632
| 0.781767
| 0.75452
| 0.729055
| 0.703591
| 0.703591
| 0.703591
| 0
| 0.027011
| 0.27149
| 6,759
| 220
| 131
| 30.722727
| 0.770512
| 0
| 0
| 0.674699
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.10241
| false
| 0
| 0.024096
| 0
| 0.228916
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
04109620882e581ef066e52ac2979d1e8cc9f7ff
| 3,856
|
py
|
Python
|
appengine/components/components/prpc/test/test_prpc_pb2.py
|
maruel/swarming
|
8ab7568635fcbfd85a01884b64704fc2a1ac13c7
|
[
"Apache-2.0"
] | 74
|
2015-04-01T02:35:15.000Z
|
2021-12-17T22:10:56.000Z
|
appengine/components/components/prpc/test/test_prpc_pb2.py
|
maruel/swarming
|
8ab7568635fcbfd85a01884b64704fc2a1ac13c7
|
[
"Apache-2.0"
] | 123
|
2015-04-01T04:02:57.000Z
|
2022-03-02T12:49:55.000Z
|
appengine/components/components/prpc/test/test_prpc_pb2.py
|
maruel/swarming
|
8ab7568635fcbfd85a01884b64704fc2a1ac13c7
|
[
"Apache-2.0"
] | 32
|
2015-04-03T01:40:47.000Z
|
2021-11-13T15:20:13.000Z
|
# Generated by the pRPC protocol buffer compiler plugin. DO NOT EDIT!
# source: test.proto
import base64
import zlib
from google.protobuf import descriptor_pb2
# Includes description of the test.proto and all of its transitive
# dependencies. Includes source code info.
FILE_DESCRIPTOR_SET = descriptor_pb2.FileDescriptorSet()
FILE_DESCRIPTOR_SET.ParseFromString(zlib.decompress(base64.b64decode(
'eJylV09z29YRN0jZYZ5kG2I8GVVO7DVjV3JCU46UaVp7pjMgCUlwKZIFQKvq9GAQfKRQkwALgH'
'I1mXyBfodeculMO9Pv0Jke+z167LWHHvrbB5CCnbo9lBfu27d/frtv3+6D+P1NIVKZpI15HKVR'
'dY3p7buTKJpM5Z7iDRfjPTmbp5eZSO2uWD8KLqQtf7OAbHVDaLMtjbTdsq3Nap+IDdd7jc1kHo'
'WJ5N3Xy93XtYZYN/3zaKl6X2ix2lzf32woEAXDthbXPhcbmXxubVtU4pyGXnn3Q3u13v9OE2su'
'Wz0Qa2ym+n2T2x83ssAay8AaJgdWu1b9CsrAXX2PxHY1M1aMDVp7Yo3xLV0VYlsqFOHXrr34gx'
'AVvaJf0x/rmvh7qbKhFtX9P2vUiuaXcTA5T2nXf0z7T7/8mtxzSZ1ByyJjkZ5HcdIgYzolJZQQ'
'ApfxhRw1BA0SSdGY0vMgoSRaxL4kPxpJwnISXcg4lCNahCMZQ0SSMfd8Nhz4EqDq9FLGSRCFtN'
'94KiDgpeR7IQ0ljSMoURAqrY7VMruOSeMAyRH7f9KADvZ5SSM5DkKZkAe3sxlMqeT50ZTSiODL'
'f01v2K7np3DEchMcSigomM3hPAq9lBEiHuV6FBFs5EIcpwoJcR5GMcnferP5FLCDFU4ojmh4CS'
'VJ3nxO3sQLwgTuwgjI4wZyXinpAolGrVfKlWv6BuhH4leVG6Bvg/6Brm13iauHc+YRHx1xepEj'
'pCFWjCCc0IUXB9EiobkX4whUziXN7X5LSQN/vAjTYAawYoOtVzTYr+gboq1WJfjb1Ev6s+2viO'
'sSWuGIHV5404XkbLG9zFQdOQv8cwaUpFHMBy30pRXY3dRv6EVOCZx1/eMCpwzOA/1Hwso5mn4H'
'vp9v/4S4jJHYdBHjNNhl5n8eywsOb3qZu+RzuEJUAKApY0UAGgDcAQAqcMrgfKF/LYY5p6RvAc'
'CX2zbxtYCPGDkNxoHMQAThfIFTiyeLmQz5+EaU3W6k6E2QIhVpnbxpAk3vEpqCds7ldBo92Ckg'
'4zC33kKW+S2mpgRkW0jNExzSGo7kLkrgU1xItYL+XRzZR+KWWvGRfQLUt6CdryvXFeeDAkcDp6'
'J/WOCUwdlAwbFNTb8PDw9yD5paV/Q7yoOmPNDKg5Z7oJUHLfdAKw9a7oFWHkr6Z1zSuQfOwWer'
'GErKw0PYu6O01bpyQ3FuFjgaOLf02wVOGZwqrLDNsr6Tty21gvTOKoay8rALe/eVtlpX1hRHFD'
'jXwVnXNwscDZyqvl3glMH5VL83vKF6yIH4a1X8t2lUvf1Ov659IK6rlt28EB+hHb3bz5tC7fZ5'
'2dd+uTNBYS2GDUjuTaKpF06u3EBsLpPM2z817btS+ajf/GPp3lFmsb+cEKcowp+F0ZvQZfkXv9'
'tEj7+HZB3ouvjbBnr8PdXj/7JB/WVfbC7GY3Q+ekKZsZ2ERl7q4QqkMvbPAUP1nZmXisJg2H/6'
'9Me5Almh36D3zIPzNJ0nz/b2RvJCTiNuscsscJjL5vxkmIHYE4JsOQr4Og4X3KHV1UNX5d6fzx'
'PmDIPQiy8VrqSe3Ui0Rv6PFsA5i0a4y77HFnBPY/QTGc+ClHs7fF4EIxBqvvBlH0e4uW+4p/q4'
'4EE+GGIJOzJ9Bkj8+/wdYKrlFifcbIFGjUbm5VPKG2LeYSvPmCCMAPTwejYbpzDFFooew9E7cO'
'DPn3ro4eh37wEBZ4VcLEEgxtECwFY4xBWQ/wuHWM7gUeSr1ugtD2kP+VczjlApMg7QHK9SrQ4I'
'm4KK6FdBdWWgNNlw6M3UC6JYW2F0tafyHqQJRxRmpvAcgdPL1fzF7MIsA1dyUQDELEp5onBOUJ'
'14fGDejWiMDbF8qYzTN1wmeQVRMpc+VxC0Ai6smGsnzKooSRR2Qe6x5ZDTO3RPDdsk0H2799Jq'
'm21qnmHTpFavf2ZbR8cuHfc6bdN2yOi2we26ttUcuD3bEVQzHKjW1I7RPSPzF33bdBzq2WSd9D'
'sWrMG8bXRdy3TqZHVbnUHb6h7VCRao23MF3kMnlgs5t1dXbr+vR71DOjHt1jGWRtPqWO6Zcnho'
'uV12dtizBRnUN2zXag06hk39gd3v4Y3FkbUtp9UxrBOz3YB/+CTzpdl1yTk2Op23AxXUO+2aNq'
'MvhklNEyiNZsdkVyrOtmWbLZcDuqJaSB4AduqCnL7ZskAhHybCMeyzem7UMX8+gBQ2qW2cGEeI'
'bvd/ZQUH0xrY5gmjRiqcQdNxLXfgmnTU67VVsh3Tfol3pfOcOj1HJWzgmADSNlxDuYYNpAv7oJ'
'sDx1KJs7quaduDvmv1uo9xyqfIDFAa0G2rDPe6HC3Xitmzz9gs50GdQJ1Oj03wbU6qypbBaXCQ'
'tZZbFINDJBEhXcVJXfOoYx2Z3ZbJ2z02c2o55mMcmOWwgKUcowbgdKCi5oMCLpHRhdKtq/Mk65'
'CM9kuLkefSqADHystFpa11nOc8f8gSpskWqIpeA/WcmZVHOc1cfgn8VHHXc5q5D0HVFVfLaeY+'
'AvWF4i5ppn4Iqqa4IqeZy/P/geI+zGnm7oK6r7j3c/pfJfWoOsBC3/5HCSU+kSGuv09qkqK/J4'
'k3kdkouIwW6hkfyyc8cNBFvIsoGGWfFKoNLuZTHipyJN7WV20Y6jEZfYs/jQjjGpLT5ScCv5xh'
'T80xvCazN2acfZ4Jyrvb8vORlVULBBbY4yF0Ho0a6pODPya80JfLqbT8MDiMIvomYxHFc5+aXr'
'z7Hz8fH68e2+/Zf56Z+ZYbHFC9cFDCPFEw05ftnj9DXinpVxxZlgslGA1/Lf2UXn3z7Sv13ZE9'
'Yg/wPLu5fEb9G3qTMik=')))
_INDEX = {
f.name: {
'descriptor': f,
'services': {s.name: s for s in f.service},
}
for f in FILE_DESCRIPTOR_SET.file
}
TestServiceDescription = {
'file_descriptor_set': FILE_DESCRIPTOR_SET,
'file_descriptor': _INDEX[u'test.proto']['descriptor'],
'service_descriptor': _INDEX[u'test.proto']['services'][u'Test'],
}
| 58.424242
| 80
| 0.860477
| 204
| 3,856
| 16.181373
| 0.754902
| 0.025447
| 0.02575
| 0.019085
| 0.026659
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135736
| 0.079098
| 3,856
| 65
| 81
| 59.323077
| 0.793861
| 0.050052
| 0
| 0
| 1
| 0
| 0.804811
| 0.768726
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.053571
| 0
| 0.053571
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0449be2cbd88e86ca1de0b2d90b12e3d0746a60f
| 7,273
|
py
|
Python
|
test/test_any_datetime_str.py
|
jwodder/anys
|
5e3ebfe6842ddce9bb9ff58be9b651f7baf6f650
|
[
"MIT"
] | 4
|
2021-06-25T04:40:06.000Z
|
2021-12-14T23:07:16.000Z
|
test/test_any_datetime_str.py
|
jwodder/anys
|
5e3ebfe6842ddce9bb9ff58be9b651f7baf6f650
|
[
"MIT"
] | null | null | null |
test/test_any_datetime_str.py
|
jwodder/anys
|
5e3ebfe6842ddce9bb9ff58be9b651f7baf6f650
|
[
"MIT"
] | null | null | null |
from datetime import date, datetime, time, timezone
from typing import Any
import pytest
from anys import (
ANY_AWARE_DATETIME_STR,
ANY_AWARE_TIME_STR,
ANY_DATE_STR,
ANY_DATETIME_STR,
ANY_NAIVE_DATETIME_STR,
ANY_NAIVE_TIME_STR,
ANY_TIME_STR,
)
from test_lib import assert_equal, assert_not_equal
@pytest.mark.parametrize(
"value",
[
"2021-06-24T19:40:06",
"2021-06-24T19:40:06Z",
"2021-06-24T19:40:06+00:00",
"2021-06-24T19:40:06+0000",
"2021-06-24T19:40:06+00",
"2021-06-24T15:40:06-04:00",
"2021-06-24T15:40:06-0400",
"2021-06-24T15:40:06-04",
"2021-06-24 19:40:06",
"2021-06-24 19:40:06Z",
"2021-06-24 19:40:06+00:00",
"2021-06-24 19:40:06+0000",
"2021-06-24 15:40:06-04:00",
"2021-06-24 15:40:06-0400",
"2021-06-24 15:40:06-04",
"2021-06-24T19:40",
"2021-06-24T19:40Z",
"2021-06-24T19:40:06.1",
"2021-06-24T19:40:06.1Z",
"2021-06-24T19:40:06.123456",
"2021-06-24T19:40:06.123456Z",
],
)
def test_any_datetime_str_eq(value: Any) -> None:
assert_equal(ANY_DATETIME_STR, value)
@pytest.mark.parametrize(
"value",
[
"2021-13-01T12:34:56Z",
"2021-01-32T12:34:56Z",
"2021-01-01T24:34:56Z",
"2021-01-01T12:60:56Z",
"2021-01-01T12:34:60Z",
"2021-06-24",
"12:34:56",
"12:34:56Z",
datetime(2021, 6, 24, 19, 40, 6),
datetime(2021, 6, 24, 19, 40, 6, tzinfo=timezone.utc),
1624563606,
"1624563606",
],
)
def test_any_datetime_str_neq(value: Any) -> None:
assert_not_equal(ANY_DATETIME_STR, value)
def test_any_datetime_str_repr() -> None:
assert repr(ANY_DATETIME_STR) == "ANY_DATETIME_STR"
@pytest.mark.parametrize(
"value",
[
"2021-06-24T19:40:06Z",
"2021-06-24T19:40:06+00:00",
"2021-06-24T19:40:06+0000",
"2021-06-24T19:40:06+00",
"2021-06-24T15:40:06-04:00",
"2021-06-24T15:40:06-0400",
"2021-06-24T15:40:06-04",
"2021-06-24 19:40:06Z",
"2021-06-24 19:40:06+00:00",
"2021-06-24 19:40:06+0000",
"2021-06-24 15:40:06-04:00",
"2021-06-24 15:40:06-0400",
"2021-06-24 15:40:06-04",
"2021-06-24T19:40Z",
"2021-06-24T19:40:06.1Z",
"2021-06-24T19:40:06.123456Z",
],
)
def test_any_aware_datetime_str_eq(value: Any) -> None:
assert_equal(ANY_AWARE_DATETIME_STR, value)
@pytest.mark.parametrize(
"value",
[
"2021-06-24T19:40:06",
"2021-06-24 19:40:06",
"2021-13-01T12:34:56",
"2021-01-32T12:34:56",
"2021-01-01T24:34:56",
"2021-01-01T12:60:56",
"2021-01-01T12:34:60",
"2021-06-24",
"12:34:56",
"12:34:56Z",
datetime(2021, 6, 24, 19, 40, 6),
datetime(2021, 6, 24, 19, 40, 6, tzinfo=timezone.utc),
1624563606,
"1624563606",
],
)
def test_any_aware_datetime_str_neq(value: Any) -> None:
assert_not_equal(ANY_AWARE_DATETIME_STR, value)
def test_any_aware_datetime_str_repr() -> None:
assert repr(ANY_AWARE_DATETIME_STR) == "ANY_AWARE_DATETIME_STR"
@pytest.mark.parametrize(
"value",
[
"2021-06-24T19:40",
"2021-06-24T19:40:06",
"2021-06-24T19:40:06.1",
"2021-06-24T19:40:06.123456",
],
)
def test_any_naive_datetime_str_eq(value: Any) -> None:
assert_equal(ANY_NAIVE_DATETIME_STR, value)
@pytest.mark.parametrize(
"value",
[
"2021-06-24T19:40:06Z",
"2021-06-24 19:40:06Z",
"2021-13-01T12:34:56Z",
"2021-01-32T12:34:56Z",
"2021-01-01T24:34:56Z",
"2021-01-01T12:60:56Z",
"2021-01-01T12:34:60Z",
"2021-06-24",
"12:34:56",
"12:34:56Z",
datetime(2021, 6, 24, 19, 40, 6),
datetime(2021, 6, 24, 19, 40, 6, tzinfo=timezone.utc),
1624563606,
"1624563606",
],
)
def test_any_naive_datetime_str_neq(value: Any) -> None:
assert_not_equal(ANY_NAIVE_DATETIME_STR, value)
def test_any_naive_datetime_str_repr() -> None:
assert repr(ANY_NAIVE_DATETIME_STR) == "ANY_NAIVE_DATETIME_STR"
def test_any_date_str_eq() -> None:
assert_equal(ANY_DATE_STR, "2021-06-24")
@pytest.mark.parametrize(
"value",
[
"2021-06-24T19:40:06",
"2021-06-24T19:40:06Z",
"2021-13-01",
"2021-01-32",
"12:34:56",
"12:34:56Z",
date(2021, 6, 24),
],
)
def test_any_date_str_neq(value: Any) -> None:
assert_not_equal(ANY_DATE_STR, value)
def test_any_date_str_repr() -> None:
assert repr(ANY_DATE_STR) == "ANY_DATE_STR"
@pytest.mark.parametrize(
"value",
[
"19:40:06",
"19:40:06Z",
"19:40:06+00:00",
"19:40:06+0000",
"19:40:06+00",
"15:40:06-04:00",
"15:40:06-0400",
"15:40:06-04",
"19:40",
"19:40Z",
"19:40:06.1",
"19:40:06.1Z",
"19:40:06.123456",
"19:40:06.123456Z",
],
)
def test_any_time_str_eq(value: Any) -> None:
assert_equal(ANY_TIME_STR, value)
@pytest.mark.parametrize(
"value",
[
"2021-06-24T19:40:06",
"2021-06-24T19:40:06Z",
"24:34:56Z",
"12:60:56Z",
"12:34:60Z",
"2021-06-24",
time(19, 40, 6),
time(19, 40, 6, tzinfo=timezone.utc),
],
)
def test_any_time_str_neq(value: Any) -> None:
assert_not_equal(ANY_TIME_STR, value)
def test_any_time_str_repr() -> None:
assert repr(ANY_TIME_STR) == "ANY_TIME_STR"
@pytest.mark.parametrize(
"value",
[
"19:40Z",
"19:40:06Z",
"19:40:06+00:00",
"19:40:06+0000",
"19:40:06+00",
"15:40:06-04:00",
"15:40:06-0400",
"15:40:06-04",
"19:40:06.1Z",
"19:40:06.123456Z",
],
)
def test_any_aware_time_str_eq(value: Any) -> None:
assert_equal(ANY_AWARE_TIME_STR, value)
@pytest.mark.parametrize(
"value",
[
"19:40:06",
"2021-06-24T19:40:06",
"2021-06-24T19:40:06Z",
"24:34:56Z",
"12:60:56Z",
"12:34:60Z",
"2021-06-24",
time(19, 40, 6),
time(19, 40, 6, tzinfo=timezone.utc),
],
)
def test_any_aware_time_str_neq(value: Any) -> None:
assert_not_equal(ANY_AWARE_TIME_STR, value)
def test_any_aware_time_str_repr() -> None:
assert repr(ANY_AWARE_TIME_STR) == "ANY_AWARE_TIME_STR"
@pytest.mark.parametrize(
"value",
[
"19:40:06",
"19:40:06.1",
"19:40:06.123456",
],
)
def test_any_naive_time_str_eq(value: Any) -> None:
assert_equal(ANY_NAIVE_TIME_STR, value)
@pytest.mark.parametrize(
"value",
[
"19:40:06Z",
"2021-06-24T19:40:06",
"2021-06-24T19:40:06Z",
"24:34:56",
"12:60:56",
"12:34:60",
"2021-06-24",
time(19, 40, 6),
time(19, 40, 6, tzinfo=timezone.utc),
],
)
def test_any_naive_time_str_neq(value: Any) -> None:
assert_not_equal(ANY_NAIVE_TIME_STR, value)
def test_any_naive_time_str_repr() -> None:
assert repr(ANY_NAIVE_TIME_STR) == "ANY_NAIVE_TIME_STR"
| 23.924342
| 67
| 0.564416
| 1,118
| 7,273
| 3.483005
| 0.053667
| 0.063688
| 0.090395
| 0.100154
| 0.917822
| 0.883154
| 0.795583
| 0.691577
| 0.680791
| 0.567283
| 0
| 0.30236
| 0.26014
| 7,273
| 303
| 68
| 24.0033
| 0.421297
| 0
| 0
| 0.643678
| 0
| 0
| 0.305651
| 0.071497
| 0
| 0
| 0
| 0
| 0.084291
| 1
| 0.08046
| false
| 0
| 0.019157
| 0
| 0.099617
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
044bc4aa684d35df63ae38654141d908988bdffa
| 53
|
py
|
Python
|
episuite/mobility/__init__.py
|
perone/episuite
|
d457bd9505cf8b9c9e1f264cdc711711057c862a
|
[
"MIT"
] | 22
|
2021-03-14T23:07:59.000Z
|
2021-06-28T05:34:47.000Z
|
episuite/mobility/__init__.py
|
perone/episuite
|
d457bd9505cf8b9c9e1f264cdc711711057c862a
|
[
"MIT"
] | 1
|
2021-11-29T14:29:42.000Z
|
2022-01-26T14:03:40.000Z
|
episuite/mobility/__init__.py
|
perone/episuite
|
d457bd9505cf8b9c9e1f264cdc711711057c862a
|
[
"MIT"
] | 3
|
2021-03-14T23:12:57.000Z
|
2021-11-26T17:33:14.000Z
|
from episuite.mobility import facebook # noqa(F401)
| 26.5
| 52
| 0.792453
| 7
| 53
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065217
| 0.132075
| 53
| 1
| 53
| 53
| 0.847826
| 0.188679
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0450179b601f51dee2335504f6e5024e4ee112da
| 233
|
py
|
Python
|
test/test_logger.py
|
cytopz/arknights-farmer
|
4140880b61a2fd08c0d2a5f51b1c8e9a65be6afc
|
[
"MIT"
] | 4
|
2020-09-09T10:43:21.000Z
|
2021-05-27T22:50:33.000Z
|
test/test_logger.py
|
rahagi/arknights-farmer
|
4140880b61a2fd08c0d2a5f51b1c8e9a65be6afc
|
[
"MIT"
] | null | null | null |
test/test_logger.py
|
rahagi/arknights-farmer
|
4140880b61a2fd08c0d2a5f51b1c8e9a65be6afc
|
[
"MIT"
] | null | null | null |
from arknights_farmer.utils.logger import Logger
Logger.log(f'this is info with fstring {2+2}')
Logger.log('this is info')
Logger.log('this is warning', 'warn')
Logger.log('this is error', 'error')
Logger.log_debug('this is debug')
| 29.125
| 48
| 0.738197
| 40
| 233
| 4.25
| 0.45
| 0.264706
| 0.229412
| 0.264706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009615
| 0.107296
| 233
| 8
| 49
| 29.125
| 0.807692
| 0
| 0
| 0
| 0
| 0
| 0.397436
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.166667
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0458a693d0923265a5c0fc9cb189af2cc9e6a760
| 18,974
|
py
|
Python
|
misc/migrations/versions/923a44238cd9_.py
|
certeu/do-portal
|
5022def5c584a4b1545397bcc3e982f9eb3f5cc0
|
[
"BSD-3-Clause"
] | 8
|
2017-01-18T10:35:04.000Z
|
2020-10-22T14:54:58.000Z
|
misc/migrations/versions/923a44238cd9_.py
|
certeu/do-portal
|
5022def5c584a4b1545397bcc3e982f9eb3f5cc0
|
[
"BSD-3-Clause"
] | 5
|
2018-01-24T09:11:47.000Z
|
2021-12-13T19:41:57.000Z
|
misc/migrations/versions/923a44238cd9_.py
|
certeu/do-portal
|
5022def5c584a4b1545397bcc3e982f9eb3f5cc0
|
[
"BSD-3-Clause"
] | 5
|
2017-02-27T12:39:25.000Z
|
2017-08-30T08:00:02.000Z
|
"""Add initial tables
Revision ID: 923a44238cd9
Revises: None
Create Date: 2016-02-08 11:08:50.951551
"""
# revision identifiers, used by Alembic.
revision = '923a44238cd9'
down_revision = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
op.create_table(
'ah_bot_types',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('updated', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=30), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table(
'deliverables',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('updated', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('deleted', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table(
'emails',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('updated', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('email', sa.String(length=255), nullable=True),
sa.Column('deleted', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table(
'organization_groups',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('updated', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=True),
sa.Column('color', sa.String(length=7), nullable=True),
sa.Column('deleted', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table(
'report_types',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('updated', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=100), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table(
'roles',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('updated', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=64), nullable=True),
sa.Column('default', sa.Boolean(), nullable=True),
sa.Column('permissions', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_index(
op.f('ix_roles_default'),
'roles',
['default'],
unique=False
)
op.create_table(
'tags',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('updated', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=50), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_table(
'tasks_groupmeta',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('updated', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('taskset_id', sa.String(length=255), nullable=True),
sa.Column('result', sa.BLOB(), nullable=True),
sa.Column('date_done', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('taskset_id')
)
op.create_table(
'tasks_taskmeta',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('updated', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('task_id', sa.String(length=255), nullable=True),
sa.Column('status', sa.String(length=50), nullable=True),
sa.Column('result', sa.BLOB(), nullable=True),
sa.Column('date_done', sa.DateTime(), nullable=True),
sa.Column('traceback', sa.Text(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('task_id')
)
op.create_table(
'ah_bots',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('updated', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('bot_type_id', sa.Integer(), nullable=True),
sa.Column('name', sa.String(length=30), nullable=True),
sa.Column('description', sa.String(length=255), nullable=True),
sa.ForeignKeyConstraint(['bot_type_id'], ['ah_bot_types.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table(
'deliverable_files',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('updated', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('deliverable_id', sa.Integer(), nullable=True),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('is_sla', mysql.TINYINT(display_width=1), nullable=True),
sa.Column('deleted', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['deliverable_id'], ['deliverables.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table(
'organizations',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('updated', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('organization_group_id', sa.Integer(), nullable=True),
sa.Column('is_sla', mysql.TINYINT(display_width=1), nullable=True),
sa.Column('abbreviation', sa.String(length=255), nullable=True),
sa.Column('old_ID', sa.String(length=5), nullable=True),
sa.Column('full_name', sa.String(length=255), nullable=True),
sa.Column('mail_template', sa.String(length=50), nullable=True),
sa.Column('mail_times', sa.Integer(), nullable=True),
sa.Column('deleted', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['organization_group_id'],
['organization_groups.id'],
name='fk_org_group_id'),
sa.PrimaryKeyConstraint('id')
)
op.create_index(
op.f('ix_organizations_abbreviation'),
'organizations',
['abbreviation'],
unique=False
)
op.create_table(
'ah_runtime_configs',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('updated', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('alias', sa.String(length=2), nullable=True),
sa.Column('ah_bot_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['ah_bot_id'], ['ah_bots.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table(
'ah_startup_configs',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('updated', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('ah_bot_id', sa.Integer(), nullable=True),
sa.Column('enabled', sa.Boolean(), nullable=True),
sa.Column('module', sa.String(length=255), nullable=True),
sa.Column('state', sa.Boolean(), nullable=True),
sa.Column('pid', sa.Integer(), nullable=True),
sa.Column('started', sa.DateTime(), nullable=True),
sa.Column('stopped', sa.DateTime(), nullable=True),
sa.ForeignKeyConstraint(['ah_bot_id'], ['ah_bots.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table(
'asn',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('updated', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('organization_id', sa.Integer(), nullable=True),
sa.Column('asn', sa.Integer(), nullable=True),
sa.Column('as_name', sa.String(length=255), nullable=True),
sa.Column('deleted', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['organization_id'], ['organizations.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table(
'contactemails_organizations',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('updated', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('email_id', sa.Integer(), nullable=False),
sa.Column('organization_id', sa.Integer(), nullable=False),
sa.Column('cp_access', sa.Boolean(), nullable=True),
sa.Column('fmb', sa.Boolean(), nullable=True),
sa.ForeignKeyConstraint(['email_id'], ['emails.id'], ),
sa.ForeignKeyConstraint(['organization_id'], ['organizations.id'], ),
sa.PrimaryKeyConstraint('id', 'email_id', 'organization_id')
)
op.create_table(
'contacts',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('updated', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('organization_id', sa.Integer(), nullable=True),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('position', sa.String(length=255), nullable=False),
sa.Column('email', sa.String(length=255), nullable=False),
sa.Column('deleted', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['organization_id'], ['organizations.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table(
'emails_organizations',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('email_id', sa.Integer(), nullable=True),
sa.Column('organization_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['email_id'], ['emails.id'], ),
sa.ForeignKeyConstraint(['organization_id'], ['organizations.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table(
'fqdns',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('updated', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('organization_id', sa.Integer(), nullable=True),
sa.Column('fqdn', sa.String(length=255), nullable=True),
sa.Column('deleted', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['organization_id'], ['organizations.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table(
'ip_ranges',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('updated', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('organization_id', sa.Integer(), nullable=True),
sa.Column('ip_range', sa.String(length=255), nullable=True),
sa.Column('deleted', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['organization_id'], ['organizations.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table(
'users',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('updated', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('role_id', sa.Integer(), nullable=True),
sa.Column('organization_id', sa.Integer(), nullable=True),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('password', sa.String(length=255), nullable=False),
sa.Column('email', sa.String(length=255), nullable=True),
sa.Column('api_key', sa.String(length=64), nullable=True),
sa.Column('is_admin', sa.Boolean(), nullable=True),
sa.Column('deleted', sa.Integer(), nullable=True),
sa.Column('otp_secret', sa.String(length=16), nullable=True),
sa.Column('otp_enabled', sa.Boolean(), nullable=False),
sa.ForeignKeyConstraint(['organization_id'], ['organizations.id'], ),
sa.ForeignKeyConstraint(['role_id'], ['roles.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('email')
)
op.create_table(
'ah_runtime_config_params',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('updated', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('ah_runtime_config_id', sa.Integer(), nullable=True),
sa.Column('key', sa.String(length=100), nullable=False),
sa.Column('value', sa.String(length=100), nullable=False),
sa.ForeignKeyConstraint(['ah_runtime_config_id'],
['ah_runtime_configs.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table(
'ah_startup_config_params',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('updated', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('ah_startup_config_id', sa.Integer(), nullable=True),
sa.Column('key', sa.String(length=100), nullable=False),
sa.Column('value', sa.String(length=100), nullable=False),
sa.ForeignKeyConstraint(['ah_startup_config_id'],
['ah_startup_configs.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table(
'fqdns_typosquats',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('updated', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('fqdn_id', sa.Integer(), nullable=True),
sa.Column('fqdn', sa.String(length=255), nullable=True),
sa.Column('dns_a', sa.String(length=255), nullable=True),
sa.Column('dns_ns', sa.String(length=255), nullable=True),
sa.Column('dns_mx', sa.String(length=255), nullable=True),
sa.Column('raw', sa.Text(), nullable=True),
sa.ForeignKeyConstraint(['fqdn_id'], ['fqdns.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table(
'samples',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('updated', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('parent_id', sa.Integer(), nullable=True),
sa.Column('filename', sa.Text(), nullable=False),
sa.Column('md5', sa.String(length=32), nullable=False),
sa.Column('sha1', sa.String(length=40), nullable=False),
sa.Column('sha256', sa.String(length=64), nullable=False),
sa.Column('sha512', sa.String(length=128), nullable=False),
sa.Column('ctph', sa.Text(), nullable=False),
sa.Column('infected', sa.Integer(), nullable=True),
sa.Column('deleted', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['parent_id'], ['samples.id'], ),
sa.ForeignKeyConstraint(['user_id'],
['users.id'],
name='fk_sample_user_id'),
sa.PrimaryKeyConstraint('id')
)
op.create_table(
'vulnerabilities',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('updated', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('organization_id', sa.Integer(), nullable=True),
sa.Column('incident_id', sa.Integer(), nullable=True),
sa.Column('check_string', sa.Text(), nullable=True),
sa.Column('reporter_name', sa.String(length=255), nullable=True),
sa.Column('reporter_email', sa.String(length=255), nullable=True),
sa.Column('url', sa.Text(), nullable=True),
sa.Column('request_method',
sa.Enum('GET', 'POST', 'PUT'),
nullable=True),
sa.Column('request_data', sa.Text(), nullable=True),
sa.Column('request_response_code', sa.Integer(), nullable=True),
sa.Column('tested', sa.DateTime(), nullable=True),
sa.Column('reported', sa.DateTime(), nullable=True),
sa.Column('patched', sa.DateTime(), nullable=True),
sa.Column('published', sa.Boolean(), nullable=True),
sa.Column('scanable', sa.Boolean(), nullable=True),
sa.Column('notes', sa.Text(), nullable=True),
sa.Column('deleted', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['organization_id'], ['organizations.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table(
'reports',
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('updated', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('type_id', sa.Integer(), nullable=True),
sa.Column('sample_id', sa.Integer(), nullable=True),
sa.Column('report', sa.Text(), nullable=True),
sa.ForeignKeyConstraint(['sample_id'], ['samples.id'], ),
sa.ForeignKeyConstraint(['type_id'], ['report_types.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table(
'tags_vulnerabilities',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('tag_id', sa.Integer(), nullable=True),
sa.Column('vulnerability_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['tag_id'], ['tags.id'], ),
sa.ForeignKeyConstraint(['vulnerability_id'],
['vulnerabilities.id'], ),
sa.PrimaryKeyConstraint('id')
)
def downgrade():
op.drop_table('tags_vulnerabilities')
op.drop_table('reports')
op.drop_table('vulnerabilities')
op.drop_table('samples')
op.drop_table('fqdns_typosquats')
op.drop_table('ah_startup_config_params')
op.drop_table('ah_runtime_config_params')
op.drop_table('users')
op.drop_table('ip_ranges')
op.drop_table('fqdns')
op.drop_table('emails_organizations')
op.drop_table('contacts')
op.drop_table('contactemails_organizations')
op.drop_table('asn')
op.drop_table('ah_startup_configs')
op.drop_table('ah_runtime_configs')
op.drop_index(op.f('ix_organizations_abbreviation'),
table_name='organizations')
op.drop_table('organizations')
op.drop_table('deliverable_files')
op.drop_table('ah_bots')
op.drop_table('tasks_taskmeta')
op.drop_table('tasks_groupmeta')
op.drop_table('tags')
op.drop_index(op.f('ix_roles_default'), table_name='roles')
op.drop_table('roles')
op.drop_table('report_types')
op.drop_table('organization_groups')
op.drop_table('emails')
op.drop_table('deliverables')
op.drop_table('ah_bot_types')
| 45.17619
| 77
| 0.61442
| 2,210
| 18,974
| 5.168778
| 0.083258
| 0.140068
| 0.185065
| 0.222358
| 0.81546
| 0.777291
| 0.71356
| 0.657621
| 0.596691
| 0.5636
| 0
| 0.011007
| 0.205176
| 18,974
| 419
| 78
| 45.28401
| 0.746436
| 0.007326
| 0
| 0.497512
| 0
| 0
| 0.16386
| 0.017794
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004975
| false
| 0.002488
| 0.007463
| 0
| 0.012438
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f0a107f31784cf0b4b054e0e9b564626ad235e60
| 143
|
py
|
Python
|
plugins/paragoo.py
|
aquatix/lifelog
|
8b5c5b747a40f5acea8fa12cccd1c81bb6cff24c
|
[
"BSD-Source-Code"
] | null | null | null |
plugins/paragoo.py
|
aquatix/lifelog
|
8b5c5b747a40f5acea8fa12cccd1c81bb6cff24c
|
[
"BSD-Source-Code"
] | 1
|
2021-11-04T15:29:53.000Z
|
2021-11-04T15:29:53.000Z
|
plugins/paragoo.py
|
aquatix/lifelog
|
8b5c5b747a40f5acea8fa12cccd1c81bb6cff24c
|
[
"BSD-Source-Code"
] | null | null | null |
# Write paragoo static site project files
def createproject(destinationdir):
"""Writes project files into `destinationdir`"""
return
| 20.428571
| 52
| 0.741259
| 15
| 143
| 7.066667
| 0.8
| 0.226415
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.174825
| 143
| 6
| 53
| 23.833333
| 0.898305
| 0.58042
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
f0ded471903547de613cb95f6a4768212f1c8ac8
| 62
|
py
|
Python
|
src/classes/__init__.py
|
N-Wouda/wordle
|
d09fff488f5760f4b428f672b28e1950bfdb6d7b
|
[
"MIT"
] | null | null | null |
src/classes/__init__.py
|
N-Wouda/wordle
|
d09fff488f5760f4b428f672b28e1950bfdb6d7b
|
[
"MIT"
] | 1
|
2022-01-24T08:00:21.000Z
|
2022-01-24T21:09:02.000Z
|
src/classes/__init__.py
|
N-Wouda/wordle
|
d09fff488f5760f4b428f672b28e1950bfdb6d7b
|
[
"MIT"
] | null | null | null |
from .Dictionary import Dictionary
from .Wordle import Wordle
| 20.666667
| 34
| 0.83871
| 8
| 62
| 6.5
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 62
| 2
| 35
| 31
| 0.962963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f0f1ccc99076b79f667abf1066282e31d4f5eef9
| 350
|
py
|
Python
|
test/execution/test_optional_inputs.py
|
riverlane/deltasimulator
|
02c9dc18c2eca3a5690920f93792062d1524da36
|
[
"MIT"
] | 8
|
2021-01-06T17:44:58.000Z
|
2021-11-17T11:16:34.000Z
|
test/execution/test_optional_inputs.py
|
KharchukS/deltasimulator
|
02c9dc18c2eca3a5690920f93792062d1524da36
|
[
"MIT"
] | null | null | null |
test/execution/test_optional_inputs.py
|
KharchukS/deltasimulator
|
02c9dc18c2eca3a5690920f93792062d1524da36
|
[
"MIT"
] | 2
|
2021-06-30T11:26:20.000Z
|
2021-07-12T19:02:33.000Z
|
import unittest
import deltalanguage as dl
from deltalanguage.test.execution import TestExecutionOptionalInputs
from test.execution.base import TestExecutionBaseDS
class TestExecutionOptionalInputsDS(TestExecutionBaseDS,
TestExecutionOptionalInputs):
pass
if __name__ == "__main__":
unittest.main()
| 20.588235
| 68
| 0.742857
| 28
| 350
| 9
| 0.607143
| 0.103175
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.211429
| 350
| 16
| 69
| 21.875
| 0.913043
| 0
| 0
| 0
| 0
| 0
| 0.022857
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.111111
| 0.444444
| 0
| 0.555556
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
f0ffc7b2ad20347520d727267841e7653b7f84a9
| 303
|
py
|
Python
|
pylti1p3/message_validators/__init__.py
|
edubadges/pylti1.3
|
46534384b79076d254ff3e94676ae6155e77a05e
|
[
"MIT"
] | 62
|
2019-08-13T11:24:36.000Z
|
2022-03-11T16:24:55.000Z
|
pylti1p3/message_validators/__init__.py
|
edubadges/pylti1.3
|
46534384b79076d254ff3e94676ae6155e77a05e
|
[
"MIT"
] | 58
|
2019-08-13T13:27:27.000Z
|
2022-03-28T11:00:56.000Z
|
pylti1p3/message_validators/__init__.py
|
edubadges/pylti1.3
|
46534384b79076d254ff3e94676ae6155e77a05e
|
[
"MIT"
] | 35
|
2019-10-11T18:26:02.000Z
|
2022-03-29T08:33:00.000Z
|
from .deep_link import DeepLinkMessageValidator
from .resource_message import ResourceMessageValidator
from .privacy_launch import PrivacyLaunchValidator
def get_validators():
return [
DeepLinkMessageValidator(),
ResourceMessageValidator(),
PrivacyLaunchValidator(),
]
| 25.25
| 54
| 0.765677
| 22
| 303
| 10.363636
| 0.681818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.178218
| 303
| 11
| 55
| 27.545455
| 0.915663
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| true
| 0
| 0.333333
| 0.111111
| 0.555556
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
|
0
| 5
|
0b09e2b7cd8ea435326edfbbafd3b4e32b2dd93f
| 17
|
py
|
Python
|
plot/__init__.py
|
indranilsinharoy/iutils
|
1b102029306fa2947d69d8ca80d976d143f3d068
|
[
"MIT"
] | null | null | null |
plot/__init__.py
|
indranilsinharoy/iutils
|
1b102029306fa2947d69d8ca80d976d143f3d068
|
[
"MIT"
] | null | null | null |
plot/__init__.py
|
indranilsinharoy/iutils
|
1b102029306fa2947d69d8ca80d976d143f3d068
|
[
"MIT"
] | null | null | null |
# plotting utils
| 8.5
| 16
| 0.764706
| 2
| 17
| 6.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 17
| 1
| 17
| 17
| 0.928571
| 0.823529
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0b12ca319366a070f241558433fb7bb3d692a083
| 244
|
py
|
Python
|
products/admin.py
|
amid-africa/photoorder
|
407cf58b3dbd3e2144a8533f489889295f946776
|
[
"MIT"
] | null | null | null |
products/admin.py
|
amid-africa/photoorder
|
407cf58b3dbd3e2144a8533f489889295f946776
|
[
"MIT"
] | null | null | null |
products/admin.py
|
amid-africa/photoorder
|
407cf58b3dbd3e2144a8533f489889295f946776
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import ProductFigure, ProductCategory, Product, ProductImage
admin.site.register(ProductFigure)
admin.site.register(ProductCategory)
admin.site.register(Product)
admin.site.register(ProductImage)
| 27.111111
| 73
| 0.844262
| 28
| 244
| 7.357143
| 0.428571
| 0.174757
| 0.330097
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.069672
| 244
| 8
| 74
| 30.5
| 0.907489
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
9bd16deb8bb92474fb67f94928c36b5cae191b9d
| 151
|
py
|
Python
|
Script's/08 - Paquetes/Calculos/RedondeoPotencia/OperacionesRedondeoPotencia.py
|
CamiloBallen24/Python-PildorasInformaticas
|
a734ac064e34b01a2f64080d5391625a5de77f54
|
[
"Apache-2.0"
] | null | null | null |
Script's/08 - Paquetes/Calculos/RedondeoPotencia/OperacionesRedondeoPotencia.py
|
CamiloBallen24/Python-PildorasInformaticas
|
a734ac064e34b01a2f64080d5391625a5de77f54
|
[
"Apache-2.0"
] | null | null | null |
Script's/08 - Paquetes/Calculos/RedondeoPotencia/OperacionesRedondeoPotencia.py
|
CamiloBallen24/Python-PildorasInformaticas
|
a734ac064e34b01a2f64080d5391625a5de77f54
|
[
"Apache-2.0"
] | 1
|
2019-06-04T19:51:05.000Z
|
2019-06-04T19:51:05.000Z
|
def potencia(op1, op2):
print("El resultado de la potencia es: ", op1**op2)
def redondear(op1):
print("El resultado del redondeo es: ", round(op1))
| 25.166667
| 52
| 0.695364
| 24
| 151
| 4.375
| 0.583333
| 0.114286
| 0.304762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.046875
| 0.152318
| 151
| 5
| 53
| 30.2
| 0.773438
| 0
| 0
| 0
| 0
| 0
| 0.410596
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
9bdde837e3e8ed9309cdd160f71290ad1b741be8
| 148
|
py
|
Python
|
slowMRI/models/__init__.py
|
bryanlimy/mri-super-resolution
|
9aa846dc3ee817b0188518b1fd0effe1fd33e043
|
[
"MIT"
] | 3
|
2021-04-20T11:22:46.000Z
|
2021-05-15T08:40:45.000Z
|
slowMRI/models/__init__.py
|
bryanlimy/mri-super-resolution
|
9aa846dc3ee817b0188518b1fd0effe1fd33e043
|
[
"MIT"
] | 1
|
2021-10-14T09:14:57.000Z
|
2021-10-14T10:40:31.000Z
|
slowMRI/models/__init__.py
|
bryanlimy/mri-super-resolution
|
9aa846dc3ee817b0188518b1fd0effe1fd33e043
|
[
"MIT"
] | 1
|
2021-05-15T08:40:55.000Z
|
2021-05-15T08:40:55.000Z
|
__all__ = ['mlp', 'registry', 'simple_models', 'unet']
from .mlp import *
from .registry import *
from .simple_models import *
from .unet import *
| 21.142857
| 54
| 0.695946
| 19
| 148
| 5.105263
| 0.421053
| 0.309278
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.155405
| 148
| 6
| 55
| 24.666667
| 0.776
| 0
| 0
| 0
| 0
| 0
| 0.189189
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
acb7626c8dbc69665ba14cc2386b9d8a220a5200
| 196
|
py
|
Python
|
django/contrib/databrowse/__init__.py
|
kix/django
|
5262a288df07daa050a0e17669c3f103f47a8640
|
[
"BSD-3-Clause"
] | 790
|
2015-01-03T02:13:39.000Z
|
2020-05-10T19:53:57.000Z
|
AppServer/lib/django-1.5/django/contrib/databrowse/__init__.py
|
nlake44/appscale
|
6944af660ca4cb772c9b6c2332ab28e5ef4d849f
|
[
"Apache-2.0"
] | 1,361
|
2015-01-08T23:09:40.000Z
|
2020-04-14T00:03:04.000Z
|
AppServer/lib/django-1.5/django/contrib/databrowse/__init__.py
|
nlake44/appscale
|
6944af660ca4cb772c9b6c2332ab28e5ef4d849f
|
[
"Apache-2.0"
] | 155
|
2015-01-08T22:59:31.000Z
|
2020-04-08T08:01:53.000Z
|
import warnings
from django.contrib.databrowse.sites import DatabrowsePlugin, ModelDatabrowse, DatabrowseSite, site
# Module-level warning: fires once at import time to steer users away from
# the deprecated Databrowse contrib app.
warnings.warn("The Databrowse contrib app is deprecated", DeprecationWarning)
| 32.666667
| 99
| 0.846939
| 21
| 196
| 7.904762
| 0.809524
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091837
| 196
| 5
| 100
| 39.2
| 0.932584
| 0
| 0
| 0
| 0
| 0
| 0.204082
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
acd22e84838e0c3d736d9c20d13314096fa5f543
| 185
|
py
|
Python
|
aula019 - DICIONARIOS/aula019c.py
|
miradouro/CursoEmVideo-Python
|
cc7b05a9a4aad8e6ef3b29453d83370094d75e41
|
[
"MIT"
] | null | null | null |
aula019 - DICIONARIOS/aula019c.py
|
miradouro/CursoEmVideo-Python
|
cc7b05a9a4aad8e6ef3b29453d83370094d75e41
|
[
"MIT"
] | null | null | null |
aula019 - DICIONARIOS/aula019c.py
|
miradouro/CursoEmVideo-Python
|
cc7b05a9a4aad8e6ef3b29453d83370094d75e41
|
[
"MIT"
] | null | null | null |
# Dictionary basics demo: remove a key, overwrite a value, add a new key,
# printing the dictionary after each mutation.
pessoas = {'nome': 'Gustavo', 'sexo': 'M', 'idade': 22}
print(pessoas)
pessoas.pop('sexo')
print(pessoas)
pessoas['nome'] = 'Rafael'
print(pessoas)
pessoas['peso'] = 98.5
print(pessoas)
| 23.125
| 55
| 0.664865
| 25
| 185
| 4.92
| 0.52
| 0.390244
| 0.308943
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.03012
| 0.102703
| 185
| 8
| 56
| 23.125
| 0.710843
| 0
| 0
| 0.5
| 0
| 0
| 0.209677
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
c59265397c36d8a887d4063e649c7044d79acc6d
| 216
|
py
|
Python
|
radiomicsfeatureextractionpipeline/backend/test/mock_ups/logic/entities/pet_series.py
|
Maastro-CDS-Imaging-Group/SQLite4Radiomics
|
e3a7afc181eec0fe04c18da00edc3772064e6758
|
[
"Apache-2.0"
] | null | null | null |
radiomicsfeatureextractionpipeline/backend/test/mock_ups/logic/entities/pet_series.py
|
Maastro-CDS-Imaging-Group/SQLite4Radiomics
|
e3a7afc181eec0fe04c18da00edc3772064e6758
|
[
"Apache-2.0"
] | 6
|
2021-06-09T19:39:27.000Z
|
2021-09-30T16:41:40.000Z
|
radiomicsfeatureextractionpipeline/backend/test/mock_ups/logic/entities/pet_series.py
|
Maastro-CDS-Imaging-Group/SQLite4Radiomics
|
e3a7afc181eec0fe04c18da00edc3772064e6758
|
[
"Apache-2.0"
] | null | null | null |
from logic.entities.pet_series import PetSeries
from test.mock_ups.logic.entities.series_with_image_slices import SeriesWithImageSlicesMockUp
class PetSeriesMockUp(PetSeries, SeriesWithImageSlicesMockUp):
    # Test double that merely combines the PetSeries interface with the
    # generic image-slice mock-up; it adds no behavior of its own.
    pass
| 30.857143
| 93
| 0.87037
| 24
| 216
| 7.625
| 0.708333
| 0.142077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 216
| 6
| 94
| 36
| 0.924242
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
c5eb6ffe7b658f96f1e35040311c311564dfb0b4
| 250
|
py
|
Python
|
optional_packages/selenium/questions_three_selenium/exceptions/__init__.py
|
filfreire/questions-three
|
1d1d621d5647407bf2d1b271e0b9c7c9f1afc5c8
|
[
"MIT"
] | 5
|
2019-07-22T06:04:07.000Z
|
2021-07-23T06:01:51.000Z
|
optional_packages/selenium/questions_three_selenium/exceptions/__init__.py
|
filfreire/questions-three
|
1d1d621d5647407bf2d1b271e0b9c7c9f1afc5c8
|
[
"MIT"
] | 15
|
2020-07-28T17:33:40.000Z
|
2021-08-23T17:30:05.000Z
|
optional_packages/selenium/questions_three_selenium/exceptions/__init__.py
|
filfreire/questions-three
|
1d1d621d5647407bf2d1b271e0b9c7c9f1afc5c8
|
[
"MIT"
] | 4
|
2019-08-25T22:41:59.000Z
|
2020-10-21T14:28:15.000Z
|
class AllBrowsersBusy(RuntimeError):
    """Error indicating that all browsers are currently busy."""
class BrowserStackTunnelClosed(RuntimeError):
    """Error indicating that the BrowserStack tunnel has closed."""
class TooManyElements(RuntimeError):
    """Error indicating that more elements were found than expected."""
class UnknownSelector(RuntimeError):
    """Error indicating that an unrecognized selector was supplied."""
class UnsupportedBrowser(RuntimeError):
    """Error indicating that the requested browser is not supported."""
| 13.157895
| 45
| 0.768
| 20
| 250
| 9.6
| 0.4
| 0.416667
| 0.4375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172
| 250
| 18
| 46
| 13.888889
| 0.927536
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
c5f3fc90b9587170c9edc6556b1b98d2bd5ee19a
| 78
|
py
|
Python
|
interfaces/python/test/evaluators_test.py
|
kernhanda/ELL
|
370c0de4e4c190ca0cb43654b4246b3686bca464
|
[
"MIT"
] | null | null | null |
interfaces/python/test/evaluators_test.py
|
kernhanda/ELL
|
370c0de4e4c190ca0cb43654b4246b3686bca464
|
[
"MIT"
] | null | null | null |
interfaces/python/test/evaluators_test.py
|
kernhanda/ELL
|
370c0de4e4c190ca0cb43654b4246b3686bca464
|
[
"MIT"
] | 1
|
2020-12-10T17:49:07.000Z
|
2020-12-10T17:49:07.000Z
|
import ell
def test():
    """Placeholder smoke test for the evaluators binding; always returns 0."""
    message = "evaluators_test.test -- TBD"
    print(message)
    return 0
| 13
| 40
| 0.641026
| 11
| 78
| 4.454545
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016667
| 0.230769
| 78
| 5
| 41
| 15.6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0.346154
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.25
| 0
| 0.75
| 0.25
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
a8605d0689c191eac805133f6516b7d7242fe33e
| 251
|
py
|
Python
|
src/fbcrawler/manager.py
|
MatthewWong68/SocialMediaDataCrawler
|
d8bd3cbf20a8ba328b60779821761a16866ac0bb
|
[
"MIT"
] | null | null | null |
src/fbcrawler/manager.py
|
MatthewWong68/SocialMediaDataCrawler
|
d8bd3cbf20a8ba328b60779821761a16866ac0bb
|
[
"MIT"
] | null | null | null |
src/fbcrawler/manager.py
|
MatthewWong68/SocialMediaDataCrawler
|
d8bd3cbf20a8ba328b60779821761a16866ac0bb
|
[
"MIT"
] | null | null | null |
class Manager:
    """Simple value holder for a place name and a number, with a print helper."""

    # Class-level defaults; always shadowed per-instance by __init__.
    place = ""
    number = 0

    def __init__(self, place, number):
        """Store *place* and *number* on the instance (no validation)."""
        self.place = place
        self.number = number

    def toString(self):
        """Print a one-line description followed by a single-space spacer line.

        Bug fix: the original concatenated ``self.number`` directly into the
        message, which raised TypeError whenever ``number`` was an int (the
        likely type, given the ``number = 0`` class default). Converting with
        ``str()`` accepts any value while producing identical output for
        strings.
        """
        print("Place: " + self.place + ". Number: " + str(self.number) + ".")
        print(" ")
| 20.916667
| 71
| 0.537849
| 27
| 251
| 4.851852
| 0.37037
| 0.251908
| 0.229008
| 0.290076
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005747
| 0.306773
| 251
| 12
| 72
| 20.916667
| 0.747126
| 0
| 0
| 0
| 0
| 0
| 0.078838
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0
| 0
| 0.555556
| 0.222222
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
a8bfc1967e09bad21d09b6140d05773fa1540b44
| 103
|
py
|
Python
|
digsby/src/gui/toolbox/__init__.py
|
ifwe/digsby
|
f5fe00244744aa131e07f09348d10563f3d8fa99
|
[
"Python-2.0"
] | 35
|
2015-08-15T14:32:38.000Z
|
2021-12-09T16:21:26.000Z
|
digsby/src/gui/toolbox/__init__.py
|
niterain/digsby
|
16a62c7df1018a49eaa8151c0f8b881c7e252949
|
[
"Python-2.0"
] | 4
|
2015-09-12T10:42:57.000Z
|
2017-02-27T04:05:51.000Z
|
digsby/src/gui/toolbox/__init__.py
|
niterain/digsby
|
16a62c7df1018a49eaa8151c0f8b881c7e252949
|
[
"Python-2.0"
] | 15
|
2015-07-10T23:58:07.000Z
|
2022-01-23T22:16:33.000Z
|
from .toolbox import *
from dnd import SimpleDropTarget
#from imgfx import drawreflected
import fontfx
| 20.6
| 32
| 0.834951
| 13
| 103
| 6.615385
| 0.615385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135922
| 103
| 4
| 33
| 25.75
| 0.966292
| 0.300971
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
a8c19ff29b381be6ce55ac24bf1ebebed4431d8c
| 163
|
py
|
Python
|
pgrpc/__init__.py
|
MrLi008/utornado
|
aa4f206f9b4bc15527866eed03155296366da20a
|
[
"Apache-2.0"
] | null | null | null |
pgrpc/__init__.py
|
MrLi008/utornado
|
aa4f206f9b4bc15527866eed03155296366da20a
|
[
"Apache-2.0"
] | null | null | null |
pgrpc/__init__.py
|
MrLi008/utornado
|
aa4f206f9b4bc15527866eed03155296366da20a
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
'''
:func
:author: MrLi
:date:
'''
import sys
import os
import codecs
import json
def main():
    """Entry-point placeholder; does nothing yet."""


if __name__ == '__main__':
    main()
| 8.15
| 26
| 0.613497
| 22
| 163
| 4.181818
| 0.772727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008065
| 0.239264
| 163
| 19
| 27
| 8.578947
| 0.733871
| 0.257669
| 0
| 0
| 0
| 0
| 0.071429
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| true
| 0.125
| 0.5
| 0
| 0.625
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
a8cd748bbfc88826f2e714ca8b59acf9490d8aed
| 159
|
py
|
Python
|
app/models/__init__.py
|
puentebravo/python-flask-practice
|
966595fa4c0e14c1660d3552676b66d7bd4b0032
|
[
"MIT"
] | null | null | null |
app/models/__init__.py
|
puentebravo/python-flask-practice
|
966595fa4c0e14c1660d3552676b66d7bd4b0032
|
[
"MIT"
] | 6
|
2021-05-16T17:49:42.000Z
|
2021-06-12T01:29:24.000Z
|
app/models/__init__.py
|
puentebravo/python-flask-practice
|
966595fa4c0e14c1660d3552676b66d7bd4b0032
|
[
"MIT"
] | null | null | null |
# Consolidates package contents for export to other modules
from .User import User
from .Post import Post
from .Comment import Comment
from .Vote import Vote
| 22.714286
| 59
| 0.805031
| 24
| 159
| 5.333333
| 0.583333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163522
| 159
| 6
| 60
| 26.5
| 0.962406
| 0.358491
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
a8d100ee007bd87831cca78c34a026ad04c3aa67
| 1,403
|
py
|
Python
|
hubspot/cms/url_redirects/__init__.py
|
Ronfer/hubspot-api-python
|
1c87274ecbba4aa3c7728f890ccc6e77b2b6d2e4
|
[
"Apache-2.0"
] | 117
|
2020-04-06T08:22:53.000Z
|
2022-03-18T03:41:29.000Z
|
hubspot/cms/url_redirects/__init__.py
|
Ronfer/hubspot-api-python
|
1c87274ecbba4aa3c7728f890ccc6e77b2b6d2e4
|
[
"Apache-2.0"
] | 62
|
2020-04-06T16:21:06.000Z
|
2022-03-17T16:50:44.000Z
|
hubspot/cms/url_redirects/__init__.py
|
Ronfer/hubspot-api-python
|
1c87274ecbba4aa3c7728f890ccc6e77b2b6d2e4
|
[
"Apache-2.0"
] | 45
|
2020-04-06T16:13:52.000Z
|
2022-03-30T21:33:17.000Z
|
# coding: utf-8
# flake8: noqa
"""
URL redirects
URL redirect operations # noqa: E501
The version of the OpenAPI document: v3
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
__version__ = "1.0.0"
# import apis into sdk package
from hubspot.cms.url_redirects.api.redirects_api import RedirectsApi
# import ApiClient
from hubspot.cms.url_redirects.api_client import ApiClient
from hubspot.cms.url_redirects.configuration import Configuration
from hubspot.cms.url_redirects.exceptions import OpenApiException
from hubspot.cms.url_redirects.exceptions import ApiTypeError
from hubspot.cms.url_redirects.exceptions import ApiValueError
from hubspot.cms.url_redirects.exceptions import ApiKeyError
from hubspot.cms.url_redirects.exceptions import ApiException
# import models into sdk package
from hubspot.cms.url_redirects.models.collection_response_with_total_url_mapping import CollectionResponseWithTotalUrlMapping
from hubspot.cms.url_redirects.models.error import Error
from hubspot.cms.url_redirects.models.error_detail import ErrorDetail
from hubspot.cms.url_redirects.models.next_page import NextPage
from hubspot.cms.url_redirects.models.paging import Paging
from hubspot.cms.url_redirects.models.url_mapping import UrlMapping
from hubspot.cms.url_redirects.models.url_mapping_create_request_body import UrlMappingCreateRequestBody
| 35.974359
| 125
| 0.846044
| 188
| 1,403
| 6.111702
| 0.335106
| 0.167102
| 0.182768
| 0.221932
| 0.527415
| 0.527415
| 0.461271
| 0.142733
| 0
| 0
| 0
| 0.007098
| 0.096222
| 1,403
| 38
| 126
| 36.921053
| 0.899054
| 0.1732
| 0
| 0
| 1
| 0
| 0.004417
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.941176
| 0
| 0.941176
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
764f06462de59751a891593e233e4cb1efa74d87
| 2,438
|
py
|
Python
|
tfrecordcreator.py
|
anilgurses/DentalBioFeature
|
530b4e9423d7a4a23e228af5e2c79e5e9c1c0ba2
|
[
"MIT"
] | 3
|
2020-06-20T22:54:04.000Z
|
2020-07-28T01:09:40.000Z
|
tfrecordcreator.py
|
anilgurses/DentalBioFeature
|
530b4e9423d7a4a23e228af5e2c79e5e9c1c0ba2
|
[
"MIT"
] | 6
|
2020-11-13T18:55:26.000Z
|
2022-02-10T01:44:49.000Z
|
tfrecordcreator.py
|
anilgurses/DentalBioFeature
|
530b4e9423d7a4a23e228af5e2c79e5e9c1c0ba2
|
[
"MIT"
] | 2
|
2020-07-28T01:09:42.000Z
|
2021-02-24T13:18:08.000Z
|
import numpy as np
import tensorflow as tf
from random import shuffle
import glob
import cv2
import pathlib
import pandas as pd
import numpy as np
from PIL import Image
import os
import sys
# Build the training frame: the first 250 positive rows plus every negative row.
tooth = pd.read_csv('tooth_dataset.csv')
same = tooth[tooth["isTooth"] == True].iloc[0:250]
dif = tooth[tooth["isTooth"] == False]
train = pd.concat([same, dif])
val_data = pd.read_csv('val_dataset.csv')


def _write_tfrecord(frame, record_file_name):
    """Serialize every row of *frame* into the tfrecord file *record_file_name*.

    Each row must provide:
      - "img": image file name, resolved under the Tooth_Data/ directory;
      - "isTooth": truthy -> label 1, falsy -> label 0;
      - "personid" / "toothid": int-convertible identifiers.

    Refactoring note: the train and validation loops in the original script
    were byte-for-byte duplicates; this helper replaces both. It also closes
    the image files (the original leaked handles via open(...).read()) and
    drops the unused image_shape decode.
    """
    writer = tf.io.TFRecordWriter(record_file_name)
    try:
        for _, row in frame.iterrows():
            img_path = os.path.join("Tooth_Data", row["img"])
            with open(img_path, 'rb') as img_file:
                img_raw = img_file.read()
            label = 1 if row["isTooth"] else 0
            personid = int(row["personid"])
            toothid = int(row["toothid"])
            example = tf.train.Example(features=tf.train.Features(
                feature={
                    "img_raw": tf.train.Feature(bytes_list=tf.train.BytesList(value=[img_raw])),
                    "label": tf.train.Feature(int64_list=tf.train.Int64List(value=[label])),
                    "personid": tf.train.Feature(int64_list=tf.train.Int64List(value=[personid])),
                    "toothid": tf.train.Feature(int64_list=tf.train.Int64List(value=[toothid]))
                }))
            writer.write(example.SerializeToString())
    finally:
        writer.close()


print("Creating the training tfrecord file")
_write_tfrecord(train, "train.tfrecord")
sys.stdout.flush()

print("Creating the validation tfrecord file")
_write_tfrecord(val_data, "val.tfrecord")
| 28.682353
| 90
| 0.674323
| 331
| 2,438
| 4.870091
| 0.238671
| 0.086849
| 0.069479
| 0.07072
| 0.725806
| 0.725806
| 0.725806
| 0.725806
| 0.725806
| 0.725806
| 0
| 0.017413
| 0.175554
| 2,438
| 84
| 91
| 29.02381
| 0.784577
| 0.016817
| 0
| 0.645161
| 0
| 0
| 0.113665
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.177419
| 0
| 0.177419
| 0.032258
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
764fb2d4ade1d63fd650b20cc152cb235f51c3de
| 7,007
|
py
|
Python
|
plugins/habitat_plugin/habitat_sensors.py
|
mattdeitke/allenact-1
|
70f106b32a38424e862399a76d84f607838063be
|
[
"MIT"
] | null | null | null |
plugins/habitat_plugin/habitat_sensors.py
|
mattdeitke/allenact-1
|
70f106b32a38424e862399a76d84f607838063be
|
[
"MIT"
] | null | null | null |
plugins/habitat_plugin/habitat_sensors.py
|
mattdeitke/allenact-1
|
70f106b32a38424e862399a76d84f607838063be
|
[
"MIT"
] | null | null | null |
from typing import Any, Optional, Tuple
import gym
import numpy as np
from pyquaternion import Quaternion
from core.base_abstractions.sensor import (
Sensor,
RGBSensor,
RGBResNetSensor,
DepthSensor,
DepthResNetSensor,
)
from core.base_abstractions.task import Task
from plugins.habitat_plugin.habitat_environment import HabitatEnvironment
from plugins.habitat_plugin.habitat_tasks import PointNavTask # type: ignore
from utils.misc_utils import prepare_locals_for_super
class RGBSensorHabitat(RGBSensor[HabitatEnvironment, Task[HabitatEnvironment]]):
    """RGB sensor that copies the "rgb" frame out of a HabitatEnvironment."""
    # For backwards compatibility: keeps the full historical argument list and
    # forwards everything to the RGBSensor base via prepare_locals_for_super.
    def __init__(
        self,
        use_resnet_normalization: bool = False,
        # Per-channel constants matching the common ImageNet normalization —
        # presumably applied by the base class when normalization is enabled;
        # TODO(review) confirm against RGBSensor.
        mean: Optional[np.ndarray] = np.array(
            [[[0.485, 0.456, 0.406]]], dtype=np.float32
        ),
        stdev: Optional[np.ndarray] = np.array(
            [[[0.229, 0.224, 0.225]]], dtype=np.float32
        ),
        height: Optional[int] = None,
        width: Optional[int] = None,
        uuid: str = "rgb",
        output_shape: Optional[Tuple[int, ...]] = None,
        output_channels: int = 3,
        unnormalized_infimum: float = 0.0,
        unnormalized_supremum: float = 1.0,
        scale_first: bool = True,
        **kwargs: Any
    ):
        super().__init__(**prepare_locals_for_super(locals()))
    def frame_from_env(self, env: HabitatEnvironment) -> np.ndarray:
        """Return a copy of the environment's current "rgb" frame."""
        return env.current_frame["rgb"].copy()
class RGBResNetSensorHabitat(
    RGBResNetSensor[HabitatEnvironment, Task[HabitatEnvironment]]
):
    """Like RGBSensorHabitat but with ResNet-feature defaults.

    Differences from RGBSensorHabitat visible here: normalization defaults to
    True, output_shape defaults to (2048,), output range is unbounded
    (-inf, inf) and scale_first defaults to False.
    """
    # For backwards compatibility
    def __init__(
        self,
        use_resnet_normalization: bool = True,
        mean: Optional[np.ndarray] = np.array(
            [[[0.485, 0.456, 0.406]]], dtype=np.float32
        ),
        stdev: Optional[np.ndarray] = np.array(
            [[[0.229, 0.224, 0.225]]], dtype=np.float32
        ),
        height: Optional[int] = None,
        width: Optional[int] = None,
        uuid: str = "rgb",
        output_shape: Optional[Tuple[int, ...]] = (2048,),
        output_channels: Optional[int] = None,
        unnormalized_infimum: float = -np.inf,
        unnormalized_supremum: float = np.inf,
        scale_first: bool = False,
        **kwargs: Any
    ):
        super().__init__(**prepare_locals_for_super(locals()))
    def frame_from_env(self, env: HabitatEnvironment) -> np.ndarray:
        """Return a copy of the environment's current "rgb" frame."""
        return env.current_frame["rgb"].copy()
class DepthSensorHabitat(DepthSensor[HabitatEnvironment, Task[HabitatEnvironment]]):
    """Depth sensor that copies the "depth" frame out of a HabitatEnvironment."""
    # For backwards compatibility: `use_resnet_normalization` is the legacy
    # name for what is now `use_normalization` (reconciled in __init__ below).
    def __init__(
        self,
        use_resnet_normalization: Optional[bool] = None,
        use_normalization: Optional[bool] = None,
        mean: Optional[np.ndarray] = np.array([[0.5]], dtype=np.float32),
        stdev: Optional[np.ndarray] = np.array([[0.25]], dtype=np.float32),
        height: Optional[int] = None,
        width: Optional[int] = None,
        uuid: str = "depth",
        output_shape: Optional[Tuple[int, ...]] = None,
        output_channels: int = 1,
        unnormalized_infimum: float = 0.0,
        unnormalized_supremum: float = 5.0,
        scale_first: bool = False,
        **kwargs: Any
    ):
        # Give priority to use_normalization, but use_resnet_normalization for backward compat. if not set
        if use_resnet_normalization is not None and use_normalization is None:
            use_normalization = use_resnet_normalization
        elif use_normalization is None:
            use_normalization = False
        super().__init__(**prepare_locals_for_super(locals()))
    def frame_from_env(self, env: HabitatEnvironment) -> np.ndarray:
        """Return a copy of the environment's current "depth" frame."""
        return env.current_frame["depth"].copy()
class DepthResNetSensorHabitat(
    DepthResNetSensor[HabitatEnvironment, Task[HabitatEnvironment]]
):
    """Like DepthSensorHabitat but with ResNet-feature defaults.

    Differences visible here: output_shape defaults to (2048,), output range is
    unbounded (-inf, inf); the legacy/new normalization-flag reconciliation is
    identical to DepthSensorHabitat.
    """
    # For backwards compatibility
    def __init__(
        self,
        use_resnet_normalization: Optional[bool] = None,
        use_normalization: Optional[bool] = None,
        mean: Optional[np.ndarray] = np.array([[0.5]], dtype=np.float32),
        stdev: Optional[np.ndarray] = np.array([[0.25]], dtype=np.float32),
        height: Optional[int] = None,
        width: Optional[int] = None,
        uuid: str = "depth",
        output_shape: Optional[Tuple[int, ...]] = (2048,),
        output_channels: Optional[int] = None,
        unnormalized_infimum: float = -np.inf,
        unnormalized_supremum: float = np.inf,
        scale_first: bool = False,
        **kwargs: Any
    ):
        # Give priority to use_normalization, but use_resnet_normalization for backward compat. if not set
        if use_resnet_normalization is not None and use_normalization is None:
            use_normalization = use_resnet_normalization
        elif use_normalization is None:
            use_normalization = False
        super().__init__(**prepare_locals_for_super(locals()))
    def frame_from_env(self, env: HabitatEnvironment) -> np.ndarray:
        """Return a copy of the environment's current "depth" frame."""
        return env.current_frame["depth"].copy()
class TargetCoordinatesSensorHabitat(Sensor[HabitatEnvironment, PointNavTask]):
    """Exposes the frame's "pointgoal_with_gps_compass" observation."""
    def __init__(
        self, coordinate_dims: int, uuid: str = "target_coordinates_ind", **kwargs: Any
    ):
        # Distance is a non-negative real and angle is normalized to the range (-Pi, Pi] or [-Pi, Pi)
        # NOTE: observation_space is only picked up via locals() by the
        # super().__init__ call below (prepare_locals_for_super).
        observation_space = gym.spaces.Box(
            np.float32(-3.15), np.float32(1000), shape=(coordinate_dims,)
        )
        super().__init__(**prepare_locals_for_super(locals()))
    def get_observation(
        self,
        env: HabitatEnvironment,
        task: Optional[PointNavTask],
        *args: Any,
        **kwargs: Any
    ) -> Any:
        """Return the point-goal (distance/angle) entry of the current frame."""
        frame = env.current_frame
        goal = frame["pointgoal_with_gps_compass"]
        return goal
class TargetObjectSensorHabitat(Sensor[HabitatEnvironment, PointNavTask]):
    """Exposes the first element of the frame's "objectgoal" observation."""
    def __init__(self, uuid: str = "target_object_id", **kwargs: Any):
        # Discrete(38): presumably the number of target-object categories in
        # the Habitat task config — TODO(review) confirm.
        observation_space = gym.spaces.Discrete(38)
        super().__init__(**prepare_locals_for_super(locals()))
    def get_observation(
        self,
        env: HabitatEnvironment,
        task: Optional[PointNavTask],
        *args: Any,
        **kwargs: Any
    ) -> Any:
        """Return frame["objectgoal"][0] for the current frame."""
        frame = env.current_frame
        goal = frame["objectgoal"][0]
        return goal
class AgentCoordinatesSensorHabitat(Sensor[HabitatEnvironment, PointNavTask]):
    """Reports the agent's position (x, y, z) plus its rotation in radians."""
    def __init__(self, uuid: str = "agent_position_and_rotation", **kwargs: Any):
        # 4 components: three position coordinates + one rotation angle.
        observation_space = gym.spaces.Box(
            np.float32(-1000), np.float32(1000), shape=(4,)
        )
        super().__init__(**prepare_locals_for_super(locals()))
    def get_observation(
        self,
        env: HabitatEnvironment,
        task: Optional[PointNavTask],
        *args: Any,
        **kwargs: Any
    ) -> Any:
        """Return np.array([x, y, z, rotation_radians]) from the simulator's agent state."""
        position = env.env.sim.get_agent_state().position
        quaternion = Quaternion(env.env.sim.get_agent_state().rotation.components)
        return np.array([position[0], position[1], position[2], quaternion.radians])
| 35.568528
| 106
| 0.644213
| 778
| 7,007
| 5.577121
| 0.195373
| 0.024891
| 0.050703
| 0.038719
| 0.772528
| 0.758239
| 0.727357
| 0.710302
| 0.661904
| 0.652224
| 0
| 0.022928
| 0.240617
| 7,007
| 196
| 107
| 35.75
| 0.79252
| 0.058513
| 0
| 0.726708
| 0
| 0
| 0.020188
| 0.011384
| 0
| 0
| 0
| 0
| 0
| 1
| 0.086957
| false
| 0.006211
| 0.055901
| 0.024845
| 0.229814
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
767d86eb027f71ae93503dce9127305023624182
| 189
|
py
|
Python
|
webapp/news/admin.py
|
Sollimann/momentum_web
|
f1fb50def9cc8cb59d2303a4b1dba580e32abbd4
|
[
"Apache-2.0"
] | null | null | null |
webapp/news/admin.py
|
Sollimann/momentum_web
|
f1fb50def9cc8cb59d2303a4b1dba580e32abbd4
|
[
"Apache-2.0"
] | null | null | null |
webapp/news/admin.py
|
Sollimann/momentum_web
|
f1fb50def9cc8cb59d2303a4b1dba580e32abbd4
|
[
"Apache-2.0"
] | null | null | null |
from django.contrib import admin
from .models import Post, Event, Slideshow
# Register the news models so they are manageable through the Django admin.
for model in (Post, Event, Slideshow):
    admin.site.register(model)
| 27
| 42
| 0.809524
| 27
| 189
| 5.666667
| 0.481481
| 0.176471
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 189
| 7
| 43
| 27
| 0.894737
| 0.137566
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
76a5c71139dc0a1b1043bbd2b7123e98044e4dec
| 254
|
py
|
Python
|
store/models.py
|
Gustaf26/django-backend-app
|
fe5b409a16623ac7121098fa62a2077204cd9901
|
[
"MIT"
] | 1
|
2022-01-07T15:52:54.000Z
|
2022-01-07T15:52:54.000Z
|
store/models.py
|
Gustaf26/django-backend-app
|
fe5b409a16623ac7121098fa62a2077204cd9901
|
[
"MIT"
] | null | null | null |
store/models.py
|
Gustaf26/django-backend-app
|
fe5b409a16623ac7121098fa62a2077204cd9901
|
[
"MIT"
] | null | null | null |
from django.db import models
# Create your models here.
from django.db import models
class Product(models.Model):
    # A store item with three free-text fields.
    # NOTE(review): price is a CharField rather than DecimalField — looks
    # deliberate here, but worth confirming with the schema owners.
    name = models.CharField(max_length=100)
    price = models.CharField(max_length=100)
    category = models.CharField(max_length=200)
| 23.090909
| 47
| 0.755906
| 36
| 254
| 5.25
| 0.527778
| 0.238095
| 0.285714
| 0.380952
| 0.539683
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04186
| 0.153543
| 254
| 10
| 48
| 25.4
| 0.837209
| 0.094488
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
76ad17a427132fb7eeec22f05801f057f10b1f7b
| 165
|
py
|
Python
|
backend/accounts/tasks.py
|
triippz-tech/TheraQ
|
e157b6dc482b7411845dfe28b4e50b1859002c9f
|
[
"MIT"
] | null | null | null |
backend/accounts/tasks.py
|
triippz-tech/TheraQ
|
e157b6dc482b7411845dfe28b4e50b1859002c9f
|
[
"MIT"
] | null | null | null |
backend/accounts/tasks.py
|
triippz-tech/TheraQ
|
e157b6dc482b7411845dfe28b4e50b1859002c9f
|
[
"MIT"
] | null | null | null |
from django.core import management
from theraq.celery import app as celery_app
@celery_app.task
def clearsessions():
    # Celery task wrapper around Django's built-in `clearsessions`
    # management command (removes expired sessions from the session store).
    management.call_command('clearsessions')
| 18.333333
| 44
| 0.8
| 22
| 165
| 5.863636
| 0.636364
| 0.139535
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127273
| 165
| 8
| 45
| 20.625
| 0.895833
| 0
| 0
| 0
| 0
| 0
| 0.078788
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
76b9e73cc3cb0a1bc29c34cb6aad955e6f10d61f
| 289
|
py
|
Python
|
yellowbrick/gridsearch/__init__.py
|
souravsingh/yellowbrick
|
a5941a6c47fbe5264f3622bc15276ba618bbe1d0
|
[
"Apache-2.0"
] | 20
|
2018-03-24T02:29:20.000Z
|
2022-03-03T05:01:40.000Z
|
yellowbrick/gridsearch/__init__.py
|
souravsingh/yellowbrick
|
a5941a6c47fbe5264f3622bc15276ba618bbe1d0
|
[
"Apache-2.0"
] | 4
|
2018-03-20T12:01:17.000Z
|
2019-04-07T16:02:19.000Z
|
yellowbrick/gridsearch/__init__.py
|
souravsingh/yellowbrick
|
a5941a6c47fbe5264f3622bc15276ba618bbe1d0
|
[
"Apache-2.0"
] | 5
|
2018-03-17T08:18:57.000Z
|
2019-11-15T02:20:20.000Z
|
"""
Visualizers for the results of GridSearchCV.
"""
##########################################################################
## Imports
##########################################################################
## Hoist visualizers into the gridsearch namespace
from .pcolor import *
| 26.272727
| 74
| 0.356401
| 16
| 289
| 6.4375
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086505
| 289
| 10
| 75
| 28.9
| 0.390152
| 0.349481
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
76c86a3d2f6fb4780071598b8e9acc863cb3d0b0
| 381
|
py
|
Python
|
back/api/permissions/isAdmin.py
|
Diplow/thebigpicture
|
4555193604be17eb09ee69a0d5420ccc0ab2dfe2
|
[
"MIT"
] | 9
|
2020-04-27T06:08:24.000Z
|
2020-09-16T14:22:02.000Z
|
back/api/permissions/isAdmin.py
|
Diplow/thebigpicture
|
4555193604be17eb09ee69a0d5420ccc0ab2dfe2
|
[
"MIT"
] | 40
|
2019-10-31T14:31:36.000Z
|
2022-02-10T07:02:44.000Z
|
back/api/permissions/isAdmin.py
|
Diplow/thebigpicture
|
4555193604be17eb09ee69a0d5420ccc0ab2dfe2
|
[
"MIT"
] | 4
|
2020-07-30T09:32:49.000Z
|
2020-09-06T19:01:57.000Z
|
from rest_framework import permissions
class IsAdmin(permissions.BasePermission):
    """Grant access only when the requesting user exists and is staff."""

    def has_permission(self, request, view):
        user = request.user
        return user and user.is_staff
class IsAdminOrReadOnly(permissions.BasePermission):
    """Allow safe (read-only) methods for everyone; writes only for staff."""

    def has_permission(self, request, view):
        if request.method in permissions.SAFE_METHODS:
            return True
        return request.user and request.user.is_staff
| 27.214286
| 101
| 0.790026
| 48
| 381
| 6.145833
| 0.520833
| 0.149153
| 0.189831
| 0.210169
| 0.661017
| 0.661017
| 0.661017
| 0.467797
| 0.467797
| 0.467797
| 0
| 0
| 0.136483
| 381
| 13
| 102
| 29.307692
| 0.896657
| 0
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.142857
| 0.285714
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
76cb3645224d69bd0d9d8d69cb91fae1d6040b39
| 259
|
py
|
Python
|
pysatSpaceWeather/instruments/__init__.py
|
JonathonMSmith/pysatSpaceWeather
|
b403a14bd9a37dd010e97be6e5da15c54a87b888
|
[
"BSD-3-Clause"
] | 3
|
2021-02-02T05:33:46.000Z
|
2022-01-20T16:54:35.000Z
|
pysatSpaceWeather/instruments/__init__.py
|
JonathonMSmith/pysatSpaceWeather
|
b403a14bd9a37dd010e97be6e5da15c54a87b888
|
[
"BSD-3-Clause"
] | 48
|
2020-08-13T22:05:06.000Z
|
2022-01-21T22:48:14.000Z
|
pysatSpaceWeather/instruments/__init__.py
|
JonathonMSmith/pysatSpaceWeather
|
b403a14bd9a37dd010e97be6e5da15c54a87b888
|
[
"BSD-3-Clause"
] | 3
|
2021-02-02T05:33:54.000Z
|
2021-08-19T17:14:24.000Z
|
from pysatSpaceWeather.instruments import methods # noqa F401
# Public instrument submodules of this package.
__all__ = ['ace_epam', 'ace_mag', 'ace_sis', 'ace_swepam',
           'sw_dst', 'sw_f107', 'sw_kp']
# Import each submodule by name so that e.g.
# `pysatSpaceWeather.instruments.sw_kp` is bound after package import.
for inst in __all__:
    exec("from pysatSpaceWeather.instruments import " + inst)
| 32.375
| 72
| 0.694981
| 35
| 259
| 4.714286
| 0.657143
| 0.254545
| 0.387879
| 0.460606
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02765
| 0.162162
| 259
| 7
| 73
| 37
| 0.732719
| 0.034749
| 0
| 0
| 0
| 0
| 0.383065
| 0.116935
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
4f4020d751ac41b3247682cf9793bfc3f2e33c8e
| 113
|
py
|
Python
|
util/__init__.py
|
Ramesh-X/Image-Enhancer
|
4eb98c4652f99ecc49f966c1c0d7cb133b6f76a4
|
[
"MIT"
] | 4
|
2018-12-24T17:12:37.000Z
|
2019-07-29T09:21:22.000Z
|
util/__init__.py
|
Ramesh-X/Image-Enhancer
|
4eb98c4652f99ecc49f966c1c0d7cb133b6f76a4
|
[
"MIT"
] | null | null | null |
util/__init__.py
|
Ramesh-X/Image-Enhancer
|
4eb98c4652f99ecc49f966c1c0d7cb133b6f76a4
|
[
"MIT"
] | 1
|
2022-03-08T07:44:56.000Z
|
2022-03-08T07:44:56.000Z
|
from .process_thread import ProcessThread
from .worker_queue import WorkerQueue
from .singleton import Singleton
| 28.25
| 41
| 0.867257
| 14
| 113
| 6.857143
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106195
| 113
| 3
| 42
| 37.666667
| 0.950495
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4f8e6c27ab90d069a301069f8ac20bdefc801765
| 46
|
py
|
Python
|
app/models/event.py
|
immortel32/Sword_Sorcery_Story_Generator
|
7978dfc335813362b2d94c455b970f58421123c8
|
[
"MIT"
] | 2
|
2021-04-01T00:50:22.000Z
|
2021-04-01T02:18:45.000Z
|
app/models/event.py
|
immortel32/Sword_Sorcery_Story_Generator
|
7978dfc335813362b2d94c455b970f58421123c8
|
[
"MIT"
] | 1
|
2021-04-01T21:39:44.000Z
|
2021-04-01T21:39:44.000Z
|
app/models/event.py
|
immortel32/Sword_Sorcery_Story_Generator
|
7978dfc335813362b2d94c455b970f58421123c8
|
[
"MIT"
] | 1
|
2021-04-01T01:03:33.000Z
|
2021-04-01T01:03:33.000Z
|
class Event:
reference: str
name: str
| 11.5
| 18
| 0.630435
| 6
| 46
| 4.833333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.304348
| 46
| 3
| 19
| 15.333333
| 0.90625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
4f9ce86bbbc1fd77b0aa0011f2328a132709a067
| 34
|
py
|
Python
|
python/testData/pyi/type/functionReturnType/FunctionReturnType.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/pyi/type/functionReturnType/FunctionReturnType.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/pyi/type/functionReturnType/FunctionReturnType.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
def f():
pass
<caret>x = f()
| 6.8
| 14
| 0.441176
| 6
| 34
| 2.5
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.323529
| 34
| 4
| 15
| 8.5
| 0.652174
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.333333
| 0
| null | null | 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
4faccfc4a5c451e83968c8e8f19af2f97bad6f52
| 85
|
py
|
Python
|
seglibpython/seglib/clustering/__init__.py
|
DerThorsten/seglib
|
4655079e390e301dd93e53f5beed6c9737d6df9f
|
[
"MIT"
] | null | null | null |
seglibpython/seglib/clustering/__init__.py
|
DerThorsten/seglib
|
4655079e390e301dd93e53f5beed6c9737d6df9f
|
[
"MIT"
] | null | null | null |
seglibpython/seglib/clustering/__init__.py
|
DerThorsten/seglib
|
4655079e390e301dd93e53f5beed6c9737d6df9f
|
[
"MIT"
] | null | null | null |
from mini_batch_kmeans import MiniBatchKMeans
#from em import expectationMaximization
| 42.5
| 45
| 0.905882
| 10
| 85
| 7.5
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082353
| 85
| 2
| 46
| 42.5
| 0.961538
| 0.447059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
96c532d363a19b553872f3772a8ec6c046f08086
| 118
|
py
|
Python
|
python/katana/local/graph.py
|
origandrew/katana
|
456d64cf48a9d474dc35fb17e4d841bfa7a2f383
|
[
"BSD-3-Clause"
] | 1
|
2022-03-17T11:55:26.000Z
|
2022-03-17T11:55:26.000Z
|
python/katana/local/graph.py
|
origandrew/katana
|
456d64cf48a9d474dc35fb17e4d841bfa7a2f383
|
[
"BSD-3-Clause"
] | null | null | null |
python/katana/local/graph.py
|
origandrew/katana
|
456d64cf48a9d474dc35fb17e4d841bfa7a2f383
|
[
"BSD-3-Clause"
] | null | null | null |
import katana.local._graph_numba
from katana.local_native import Graph, TxnContext
__all__ = ["Graph", "TxnContext"]
| 23.6
| 49
| 0.79661
| 15
| 118
| 5.8
| 0.6
| 0.252874
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101695
| 118
| 4
| 50
| 29.5
| 0.820755
| 0
| 0
| 0
| 0
| 0
| 0.127119
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
96dbdcbbc091fbe87ab50355e2a07bce37e1c5c0
| 624
|
py
|
Python
|
Server/Schema/UserSchema.py
|
admantiumblack/invest-trigger-fastapi
|
f21fb7d7b512bb80a5da3000bdb581023ac7a177
|
[
"MIT"
] | null | null | null |
Server/Schema/UserSchema.py
|
admantiumblack/invest-trigger-fastapi
|
f21fb7d7b512bb80a5da3000bdb581023ac7a177
|
[
"MIT"
] | null | null | null |
Server/Schema/UserSchema.py
|
admantiumblack/invest-trigger-fastapi
|
f21fb7d7b512bb80a5da3000bdb581023ac7a177
|
[
"MIT"
] | null | null | null |
from Server.Schema.BaseSchema import BaseSchemaDTO
from pydantic.fields import Field
from typing import Optional
class UserSchemaDTO(BaseSchemaDTO):
userId: Optional[int] = Field(alias='userId')
fullName: Optional[str] = Field(alias='fullName')
username: Optional[str] = Field(alias='username')
email: Optional[str] = Field(alias='email')
password: Optional[str] = Field(alias='password')
class UserRegisterSchemaDTO(BaseSchemaDTO):
fullName: str = Field(alias='fullName')
username: str = Field(alias='username')
email: str = Field(alias='email')
password: str = Field(alias='password')
| 39
| 53
| 0.725962
| 71
| 624
| 6.380282
| 0.309859
| 0.198676
| 0.229581
| 0.18543
| 0.357616
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142628
| 624
| 16
| 54
| 39
| 0.846729
| 0
| 0
| 0
| 0
| 0
| 0.1024
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.142857
| 0.214286
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
96e044ca00de4dd6abe4aa15774c1501aeaa47a0
| 612
|
py
|
Python
|
challenges/sorting_algos/selection_sort/test_selection.py
|
scott-currie/data_structures_and_algorithms
|
04c1f50c01dbe6ee15c3f0a1155cc2c9528bdd06
|
[
"MIT"
] | null | null | null |
challenges/sorting_algos/selection_sort/test_selection.py
|
scott-currie/data_structures_and_algorithms
|
04c1f50c01dbe6ee15c3f0a1155cc2c9528bdd06
|
[
"MIT"
] | null | null | null |
challenges/sorting_algos/selection_sort/test_selection.py
|
scott-currie/data_structures_and_algorithms
|
04c1f50c01dbe6ee15c3f0a1155cc2c9528bdd06
|
[
"MIT"
] | null | null | null |
import pytest
import random
from .selection import selection_sort
def test_import():
assert selection_sort
def test_selection_sort_ints():
assert selection_sort([3, 1, 2]) == [1, 2, 3]
rand_ints = [random.randint(1, 100) for i in range(100)]
assert selection_sort(rand_ints) == sorted(rand_ints)
def test_selection_sort_strings():
assert selection_sort(['one', 'two', 'three']) == ['one', 'three', 'two']
def test_selection_sort_empty():
assert selection_sort([]) == []
def test_selection_sort_non_list_exception():
with pytest.raises(TypeError):
selection_sort({})
| 22.666667
| 77
| 0.699346
| 83
| 612
| 4.855422
| 0.385542
| 0.354839
| 0.235732
| 0.198511
| 0.193548
| 0.193548
| 0.193548
| 0
| 0
| 0
| 0
| 0.02544
| 0.165033
| 612
| 26
| 78
| 23.538462
| 0.763209
| 0
| 0
| 0
| 0
| 0
| 0.035948
| 0
| 0
| 0
| 0
| 0
| 0.3125
| 1
| 0.3125
| false
| 0
| 0.25
| 0
| 0.5625
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
8c318e9f91e8a1fd70824fd9469012a7ef399891
| 133
|
py
|
Python
|
favorites/admin.py
|
UB-ES-2021-A1/wannasell-backend
|
84360b2985fc28971867601373697f39303e396b
|
[
"Unlicense"
] | null | null | null |
favorites/admin.py
|
UB-ES-2021-A1/wannasell-backend
|
84360b2985fc28971867601373697f39303e396b
|
[
"Unlicense"
] | 62
|
2021-11-22T21:52:44.000Z
|
2021-12-17T15:07:02.000Z
|
favorites/admin.py
|
UB-ES-2021-A1/wannasell-backend
|
84360b2985fc28971867601373697f39303e396b
|
[
"Unlicense"
] | null | null | null |
from django.contrib import admin
# Register your models here.
from favorites.models import Favorites
admin.site.register(Favorites)
| 22.166667
| 38
| 0.827068
| 18
| 133
| 6.111111
| 0.611111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112782
| 133
| 6
| 39
| 22.166667
| 0.932203
| 0.195489
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8c35c97145932b7114c21430fa1f3dfe12fbf528
| 114
|
py
|
Python
|
tests/test_handshake.py
|
Neilblaze/websockets
|
c39268c4867e41d11c20f7859583761d52a04012
|
[
"BSD-3-Clause"
] | 1
|
2021-03-04T06:10:30.000Z
|
2021-03-04T06:10:30.000Z
|
tests/test_handshake.py
|
Neilblaze/websockets
|
c39268c4867e41d11c20f7859583761d52a04012
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_handshake.py
|
Neilblaze/websockets
|
c39268c4867e41d11c20f7859583761d52a04012
|
[
"BSD-3-Clause"
] | null | null | null |
# Check that the legacy handshake module imports without an exception.
from websockets.handshake import * # noqa
| 38
| 70
| 0.798246
| 15
| 114
| 6.066667
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 114
| 2
| 71
| 57
| 0.947917
| 0.640351
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8c5458859bb8994f413af4138d47e68f5f776ca6
| 6,922
|
py
|
Python
|
Assets/MyAssets/WakaTime/client/tests/test_dependencies.py
|
BillScott1024/WhiteBlocks
|
46c8373a4709c5a8a83a7b3da6b6bf7d3f9a37a5
|
[
"MIT"
] | null | null | null |
Assets/MyAssets/WakaTime/client/tests/test_dependencies.py
|
BillScott1024/WhiteBlocks
|
46c8373a4709c5a8a83a7b3da6b6bf7d3f9a37a5
|
[
"MIT"
] | null | null | null |
Assets/MyAssets/WakaTime/client/tests/test_dependencies.py
|
BillScott1024/WhiteBlocks
|
46c8373a4709c5a8a83a7b3da6b6bf7d3f9a37a5
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from wakatime.main import execute
from wakatime.packages import requests
import os
import time
import sys
from wakatime.compat import u
from wakatime.packages.requests.models import Response
from . import utils
try:
from .packages import simplejson as json
except (ImportError, SyntaxError):
import json
try:
from mock import ANY
except ImportError:
from unittest.mock import ANY
class LanguagesTestCase(utils.TestCase):
patch_these = [
'wakatime.packages.requests.adapters.HTTPAdapter.send',
'wakatime.offlinequeue.Queue.push',
['wakatime.offlinequeue.Queue.pop', None],
'wakatime.session_cache.SessionCache.save',
'wakatime.session_cache.SessionCache.delete',
['wakatime.session_cache.SessionCache.get', requests.session],
]
def test_python_dependencies_detected(self):
response = Response()
response.status_code = 0
self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response
now = u(int(time.time()))
entity = 'tests/samples/codefiles/python.py'
config = 'tests/samples/configs/good_config.cfg'
args = ['--file', entity, '--config', config, '--time', now]
retval = execute(args)
self.assertEquals(retval, 102)
self.assertEquals(sys.stdout.getvalue(), '')
self.assertEquals(sys.stderr.getvalue(), '')
self.patched['wakatime.session_cache.SessionCache.get'].assert_called_once_with()
self.patched['wakatime.session_cache.SessionCache.delete'].assert_called_once_with()
self.patched['wakatime.session_cache.SessionCache.save'].assert_not_called()
heartbeat = {
'language': u('Python'),
'lines': 36,
'entity': os.path.realpath(entity),
'project': u(os.path.basename(os.path.realpath('.'))),
'dependencies': ANY,
'branch': os.environ.get('TRAVIS_COMMIT', ANY),
'time': float(now),
'type': 'file',
}
stats = {
u('cursorpos'): None,
u('dependencies'): ANY,
u('language'): u('Python'),
u('lineno'): None,
u('lines'): 36,
}
expected_dependencies = [
'app',
'django',
'flask',
'jinja',
'mock',
'os',
'pygments',
'simplejson',
'sqlalchemy',
'sys',
'unittest',
]
def normalize(items):
return sorted([u(x) for x in items])
self.patched['wakatime.offlinequeue.Queue.push'].assert_called_once_with(heartbeat, ANY, None)
dependencies = self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]['dependencies']
self.assertEquals(normalize(dependencies), normalize(expected_dependencies))
self.assertEquals(stats, json.loads(self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][1]))
self.patched['wakatime.offlinequeue.Queue.pop'].assert_not_called()
def test_bower_dependencies_detected(self):
response = Response()
response.status_code = 0
self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response
now = u(int(time.time()))
entity = 'tests/samples/codefiles/bower.json'
config = 'tests/samples/configs/good_config.cfg'
args = ['--file', entity, '--config', config, '--time', now]
retval = execute(args)
self.assertEquals(sys.stdout.getvalue(), '')
self.assertEquals(sys.stderr.getvalue(), '')
self.assertEquals(retval, 102)
heartbeat = {
'language': u('JSON'),
'lines': 11,
'entity': os.path.realpath(entity),
'project': u(os.path.basename(os.path.realpath('.'))),
'dependencies': ANY,
'branch': os.environ.get('TRAVIS_COMMIT', ANY),
'time': float(now),
'type': 'file',
}
stats = {
u('cursorpos'): None,
u('dependencies'): ANY,
u('language'): u('JSON'),
u('lineno'): None,
u('lines'): 11,
}
expected_dependencies = ['animate.css', 'moment', 'moment-timezone']
self.patched['wakatime.offlinequeue.Queue.push'].assert_called_once_with(heartbeat, ANY, None)
for dep in expected_dependencies:
self.assertIn(dep, self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]['dependencies'])
self.assertEquals(stats, json.loads(self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][1]))
self.patched['wakatime.offlinequeue.Queue.pop'].assert_not_called()
def test_java_dependencies_detected(self):
response = Response()
response.status_code = 0
self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = response
now = u(int(time.time()))
entity = 'tests/samples/codefiles/java.java'
config = 'tests/samples/configs/good_config.cfg'
args = ['--file', entity, '--config', config, '--time', now]
retval = execute(args)
self.assertEquals(retval, 102)
self.assertEquals(sys.stdout.getvalue(), '')
self.assertEquals(sys.stderr.getvalue(), '')
self.patched['wakatime.session_cache.SessionCache.get'].assert_called_once_with()
self.patched['wakatime.session_cache.SessionCache.delete'].assert_called_once_with()
self.patched['wakatime.session_cache.SessionCache.save'].assert_not_called()
heartbeat = {
'language': u('Java'),
'lines': 20,
'entity': os.path.realpath(entity),
'project': u(os.path.basename(os.path.realpath('.'))),
'dependencies': ANY,
'branch': os.environ.get('TRAVIS_COMMIT', ANY),
'time': float(now),
'type': 'file',
}
stats = {
u('cursorpos'): None,
u('dependencies'): ANY,
u('language'): u('Java'),
u('lineno'): None,
u('lines'): 20,
}
expected_dependencies = [
'googlecode.javacv',
'colorfulwolf.webcamapplet',
'foobar',
]
def normalize(items):
return sorted([u(x) for x in items])
self.patched['wakatime.offlinequeue.Queue.push'].assert_called_once_with(heartbeat, ANY, None)
dependencies = self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][0]['dependencies']
self.assertEquals(normalize(dependencies), normalize(expected_dependencies))
self.assertEquals(stats, json.loads(self.patched['wakatime.offlinequeue.Queue.push'].call_args[0][1]))
self.patched['wakatime.offlinequeue.Queue.pop'].assert_not_called()
| 37.215054
| 112
| 0.604739
| 722
| 6,922
| 5.695291
| 0.18144
| 0.056177
| 0.097033
| 0.090467
| 0.798152
| 0.759241
| 0.747811
| 0.747811
| 0.747811
| 0.747811
| 0
| 0.007122
| 0.249494
| 6,922
| 185
| 113
| 37.416216
| 0.784408
| 0.003034
| 0
| 0.576923
| 0
| 0
| 0.264386
| 0.181331
| 0
| 0
| 0
| 0
| 0.173077
| 1
| 0.032051
| false
| 0
| 0.089744
| 0.012821
| 0.147436
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8c578fa54cbe69c220761123bbbbae6703afb29d
| 90
|
py
|
Python
|
hvad/exceptions.py
|
aptivate/django-hvad
|
61457412eeae09b5df1c514a5b162230be125e1b
|
[
"BSD-3-Clause"
] | 1
|
2015-09-28T10:07:48.000Z
|
2015-09-28T10:07:48.000Z
|
hvad/exceptions.py
|
aptivate/django-hvad
|
61457412eeae09b5df1c514a5b162230be125e1b
|
[
"BSD-3-Clause"
] | null | null | null |
hvad/exceptions.py
|
aptivate/django-hvad
|
61457412eeae09b5df1c514a5b162230be125e1b
|
[
"BSD-3-Clause"
] | null | null | null |
from django.db.models.fields import FieldDoesNotExist
class WrongManager(Exception): pass
| 30
| 53
| 0.855556
| 11
| 90
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077778
| 90
| 3
| 54
| 30
| 0.927711
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
4fc81cbd9d5a5d0b088f55566fe35e73f4a041ee
| 56
|
py
|
Python
|
app/shema/models/__init__.py
|
powersemmi/notes
|
79346aa3e660c33016ecb30ee2cdcf25ff0e129f
|
[
"MIT"
] | null | null | null |
app/shema/models/__init__.py
|
powersemmi/notes
|
79346aa3e660c33016ecb30ee2cdcf25ff0e129f
|
[
"MIT"
] | null | null | null |
app/shema/models/__init__.py
|
powersemmi/notes
|
79346aa3e660c33016ecb30ee2cdcf25ff0e129f
|
[
"MIT"
] | null | null | null |
from .notes import Notes
from .base import BaseModel, db
| 28
| 31
| 0.803571
| 9
| 56
| 5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 56
| 2
| 31
| 28
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4ffebe253ec8eff6998d93dbeb92d948d3717cbc
| 129
|
py
|
Python
|
bc_health_io/community/admin.py
|
catedt/bc_health_io
|
1942441067d731075a78d3082b83b706aa1e340f
|
[
"MIT"
] | 2
|
2020-12-08T21:51:10.000Z
|
2021-02-01T07:59:41.000Z
|
bc_health_io/community/admin.py
|
catedt/bc-health-io
|
1942441067d731075a78d3082b83b706aa1e340f
|
[
"MIT"
] | null | null | null |
bc_health_io/community/admin.py
|
catedt/bc-health-io
|
1942441067d731075a78d3082b83b706aa1e340f
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from community.models import Article
# Register your models here.
admin.site.register(Article)
| 21.5
| 36
| 0.821705
| 18
| 129
| 5.888889
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116279
| 129
| 5
| 37
| 25.8
| 0.929825
| 0.20155
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8b08246a472fa8b60a57e22f246877c148307e84
| 161
|
py
|
Python
|
doc/samples/title.py
|
m4ta1l/doit
|
d1a1b7b3abc7641d977d3b78b580d97aea4e27ea
|
[
"MIT"
] | 1,390
|
2015-01-01T21:11:47.000Z
|
2022-03-31T11:35:44.000Z
|
doc/samples/title.py
|
m4ta1l/doit
|
d1a1b7b3abc7641d977d3b78b580d97aea4e27ea
|
[
"MIT"
] | 393
|
2015-01-05T11:18:29.000Z
|
2022-03-20T11:46:46.000Z
|
doc/samples/title.py
|
m4ta1l/doit
|
d1a1b7b3abc7641d977d3b78b580d97aea4e27ea
|
[
"MIT"
] | 176
|
2015-01-07T16:58:56.000Z
|
2022-03-28T12:12:11.000Z
|
def show_cmd(task):
return "executing... %s" % task.name
def task_custom_display():
return {'actions':['echo abc efg'],
'title': show_cmd}
| 20.125
| 40
| 0.602484
| 21
| 161
| 4.428571
| 0.714286
| 0.150538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.229814
| 161
| 7
| 41
| 23
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0.24375
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0.4
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
8b3628c78017380159c5e2dc649855110ce7d0ab
| 49
|
py
|
Python
|
commands/commandconfig.py
|
JamaSoftware/post-import-processing
|
f33b2a0b9ad7753bcec34b6378c91a8462454c9c
|
[
"MIT"
] | null | null | null |
commands/commandconfig.py
|
JamaSoftware/post-import-processing
|
f33b2a0b9ad7753bcec34b6378c91a8462454c9c
|
[
"MIT"
] | null | null | null |
commands/commandconfig.py
|
JamaSoftware/post-import-processing
|
f33b2a0b9ad7753bcec34b6378c91a8462454c9c
|
[
"MIT"
] | null | null | null |
class CommandConfig:
legacy_id = "legacy_id"
| 16.333333
| 27
| 0.734694
| 6
| 49
| 5.666667
| 0.666667
| 0.470588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.183673
| 49
| 3
| 27
| 16.333333
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0.183673
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
8c9125d6b6e90b1a757912420ebfb0d6bb2088a3
| 81
|
py
|
Python
|
deephyper/nas/preprocessing/__init__.py
|
Z223I/deephyper
|
4fd1054dc22f15197567bdd93c6e7a95a614b8e2
|
[
"BSD-3-Clause"
] | 1
|
2021-09-03T18:24:31.000Z
|
2021-09-03T18:24:31.000Z
|
deephyper/nas/preprocessing/__init__.py
|
Z223I/deephyper
|
4fd1054dc22f15197567bdd93c6e7a95a614b8e2
|
[
"BSD-3-Clause"
] | null | null | null |
deephyper/nas/preprocessing/__init__.py
|
Z223I/deephyper
|
4fd1054dc22f15197567bdd93c6e7a95a614b8e2
|
[
"BSD-3-Clause"
] | 1
|
2021-08-31T13:47:27.000Z
|
2021-08-31T13:47:27.000Z
|
from deephyper.nas.preprocessing.preprocessing import minmaxstdscaler, stdscaler
| 40.5
| 80
| 0.888889
| 8
| 81
| 9
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061728
| 81
| 1
| 81
| 81
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8c9b93a2d3b2886167b2dd0a04fb8cb7a313a405
| 62
|
py
|
Python
|
jpy_flette/__init__.py
|
IMTorgDemo/jpy-flette
|
e2b656e1bd03de791e2c4e263ec4b8efc9bd8448
|
[
"MIT"
] | 5
|
2019-05-18T04:21:32.000Z
|
2021-09-15T17:51:52.000Z
|
jpy_flette/__init__.py
|
IMTorgDemo/jpy-flette
|
e2b656e1bd03de791e2c4e263ec4b8efc9bd8448
|
[
"MIT"
] | 1
|
2019-06-08T09:00:51.000Z
|
2019-06-08T09:01:20.000Z
|
jpy_flette/__init__.py
|
IMTorgDemo/jpy-flette
|
e2b656e1bd03de791e2c4e263ec4b8efc9bd8448
|
[
"MIT"
] | 4
|
2018-11-21T03:40:59.000Z
|
2021-03-12T10:46:00.000Z
|
from .jpy_flette import *
from .__version__ import __version__
| 31
| 36
| 0.83871
| 8
| 62
| 5.375
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112903
| 62
| 2
| 36
| 31
| 0.781818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8cc21b0db1923c5521bacc5eb13a5a6671266f4a
| 7,293
|
py
|
Python
|
test/test_sobolev_alignment.py
|
saroudant/sobolev_alignment
|
beec4074b24544020359550040e2b66707d113b3
|
[
"MIT"
] | 1
|
2022-03-10T15:21:49.000Z
|
2022-03-10T15:21:49.000Z
|
test/test_sobolev_alignment.py
|
saroudant/sobolev_alignment
|
beec4074b24544020359550040e2b66707d113b3
|
[
"MIT"
] | null | null | null |
test/test_sobolev_alignment.py
|
saroudant/sobolev_alignment
|
beec4074b24544020359550040e2b66707d113b3
|
[
"MIT"
] | null | null | null |
import numpy as np
import pandas as pd
import scipy
import torch
import pytest
from anndata import AnnData
from sklearn.datasets import make_spd_matrix
from joblib import Parallel, delayed
from sobolev_alignment import SobolevAlignment
n_samples = 500
n_samples_valid = 50
n_genes = 50
n_batches = 3
n_artificial_samples = 2000
n_latent = 5
frac_save_artificial = 0.1
@pytest.fixture(scope='module')
def source_data():
poisson_coef = np.random.randint(1, 25, size=n_genes)
return np.concatenate(
[np.random.poisson(lam=l, size=n_samples).reshape(-1, 1) for l in poisson_coef],
axis=1
)
@pytest.fixture(scope='module')
def target_data():
poisson_coef = np.random.randint(1, 25, size=n_genes)
return np.concatenate(
[np.random.poisson(lam=l, size=n_samples).reshape(-1, 1) for l in poisson_coef],
axis=1
)
@pytest.fixture(scope='module')
def source_batch():
return np.random.choice(np.arange(n_batches).astype(str), size=n_samples)
@pytest.fixture(scope='module')
def target_batch():
return np.random.choice(np.arange(n_batches).astype(str), size=n_samples)
@pytest.fixture(scope='module')
def source_anndata(source_data, source_batch):
return AnnData(
source_data,
obs=pd.DataFrame(source_batch, columns=['batch'])
)
@pytest.fixture(scope='module')
def target_anndata(target_data, target_batch):
return AnnData(
target_data,
obs=pd.DataFrame(target_batch, columns=['batch'])
)
@pytest.fixture(scope='module')
def source_scvi_params():
return {
'model': {
'dispersion': 'gene-cell',
'gene_likelihood': 'zinb',
'n_hidden': 20,
'n_latent': n_latent,
'n_layers': 1,
'dropout_rate': 0.1
},
'plan': {
'lr': 0.005,
'weight_decay': 0.01,
'reduce_lr_on_plateau': True,
},
'train': {
'early_stopping': True,
'max_epochs': 10
}
}
@pytest.fixture(scope='module')
def target_scvi_params():
return {
'model': {
'dispersion': 'gene-cell',
'gene_likelihood': 'zinb',
'n_hidden': 20,
'n_latent': n_latent,
'n_layers': 1,
'dropout_rate': 0.1
},
'plan': {
'lr': 0.005,
'weight_decay': 0.01,
'reduce_lr_on_plateau': True,
},
'train': {
'early_stopping': True,
'max_epochs': 10
}
}
class TestSobolevAlignment():
@pytest.fixture(scope='class')
def sobolev_alignment_raw(
self,
source_scvi_params,
target_scvi_params
):
return SobolevAlignment(
source_scvi_params,
target_scvi_params
)
@pytest.fixture(scope='class')
def sobolev_alignment_batch(
self,
source_scvi_params,
target_scvi_params
):
return SobolevAlignment(
source_scvi_params,
target_scvi_params
)
@pytest.fixture(scope='class')
def scvi_raw_trained(
self,
source_anndata,
target_anndata,
sobolev_alignment_raw
):
return sobolev_alignment_raw.fit(
X_source=source_anndata,
X_target=target_anndata,
source_batch_name=None,
target_batch_name=None
)
@pytest.fixture(scope='class')
def scvi_batch_trained(
self,
source_anndata,
target_anndata,
sobolev_alignment_batch
):
return sobolev_alignment_batch.fit(
X_source=source_anndata,
X_target=target_anndata,
source_batch_name='batch',
target_batch_name='batch',
n_artificial_samples=n_artificial_samples,
frac_save_artificial=frac_save_artificial
)
@pytest.fixture(scope='class')
def scvi_batch_trained_lib_size(
self,
source_anndata,
target_anndata,
sobolev_alignment_batch
):
return sobolev_alignment_batch.fit(
X_source=source_anndata,
X_target=target_anndata,
source_batch_name='batch',
target_batch_name='batch',
n_artificial_samples=n_artificial_samples,
frac_save_artificial=frac_save_artificial,
lib_size_norm=True
)
###
# TEST INIT METHODS
###
def test_training_scvi_batch_trained(
self,
scvi_batch_trained,
):
assert type(scvi_batch_trained.scvi_models) == dict
for x, model in scvi_batch_trained.scvi_models.items():
assert model.history['train_loss_epoch'].values[-1,0] < model.history['train_loss_epoch'].values[0,0]
for x in scvi_batch_trained.artificial_samples_:
assert scvi_batch_trained.artificial_samples_[x].shape[0] == n_artificial_samples * frac_save_artificial
assert scvi_batch_trained.artificial_samples_[x].shape[1] == n_genes
for x in scvi_batch_trained.artificial_embeddings_:
assert scvi_batch_trained.artificial_embeddings_[x].shape[0] == n_artificial_samples * frac_save_artificial
assert scvi_batch_trained.artificial_embeddings_[x].shape[1] == n_latent
def test_training_scvi_batch_trained_lib_size(
self,
scvi_batch_trained_lib_size,
):
assert type(scvi_batch_trained_lib_size.scvi_models) == dict
for x, model in scvi_batch_trained_lib_size.scvi_models.items():
assert model.history['train_loss_epoch'].values[-1,0] < model.history['train_loss_epoch'].values[0,0]
for x in scvi_batch_trained_lib_size.artificial_samples_:
assert scvi_batch_trained_lib_size.artificial_samples_[x].shape[0] == n_artificial_samples * frac_save_artificial
assert scvi_batch_trained_lib_size.artificial_samples_[x].shape[1] == n_genes
for x in scvi_batch_trained_lib_size.artificial_embeddings_:
assert scvi_batch_trained_lib_size.artificial_embeddings_[x].shape[0] == n_artificial_samples * frac_save_artificial
assert scvi_batch_trained_lib_size.artificial_embeddings_[x].shape[1] == n_latent
# np.savetxt(open('source.csv', 'w'), scvi_batch_trained_lib_size.artificial_samples_['source'])
# np.savetxt(open('target.csv', 'w'), scvi_batch_trained_lib_size.artificial_samples_['target'])
assert np.mean(np.sum(scvi_batch_trained_lib_size.artificial_samples_['source'], axis=1)) == np.mean(np.sum(scvi_batch_trained_lib_size.artificial_samples_['target'], axis=1))
def test_KRR_scvi_trained(self, scvi_batch_trained):
for x in scvi_batch_trained.artificial_samples_:
assert scvi_batch_trained.approximate_krr_regressions_[x].sample_weights_.shape[1] == n_latent
# def test_training_scvi_raw_trained(self, scvi_raw_trained):
# assert type(scvi_raw_trained.scvi_models) == dict
# for x, model in scvi_raw_trained.scvi_models.items():
# assert model.history['train_loss_epoch'].values[-1,0] < model.history['train_loss_epoch'].values[0,0]
| 32.127753
| 183
| 0.64322
| 891
| 7,293
| 4.892256
| 0.14927
| 0.059876
| 0.106446
| 0.065382
| 0.84033
| 0.815325
| 0.783666
| 0.746272
| 0.666896
| 0.629043
| 0
| 0.014396
| 0.257096
| 7,293
| 226
| 184
| 32.269912
| 0.790144
| 0.067051
| 0
| 0.567568
| 0
| 0
| 0.069387
| 0
| 0
| 0
| 0
| 0
| 0.075676
| 1
| 0.086486
| false
| 0
| 0.048649
| 0.059459
| 0.210811
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8cdab304582ab964abdf6b33aab664502de73c04
| 170
|
py
|
Python
|
src/utily/admin.py
|
LeeSinLiang/eC2
|
a7cd4333d479c8f5695f52c382fbe38c83975cde
|
[
"MIT"
] | 1
|
2019-06-09T12:09:21.000Z
|
2019-06-09T12:09:21.000Z
|
src/utily/admin.py
|
LeeSinLiang/eC2
|
a7cd4333d479c8f5695f52c382fbe38c83975cde
|
[
"MIT"
] | 7
|
2019-12-19T05:29:16.000Z
|
2022-03-11T23:49:05.000Z
|
src/utily/admin.py
|
LeeSinLiang/eC2
|
a7cd4333d479c8f5695f52c382fbe38c83975cde
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import *
# Register your models here.
admin.site.register(Review)
admin.site.register(Wishlist)
admin.site.register(History)
| 28.333333
| 32
| 0.811765
| 24
| 170
| 5.75
| 0.541667
| 0.195652
| 0.369565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088235
| 170
| 6
| 33
| 28.333333
| 0.890323
| 0.152941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
8cdbc82af9f34d824fc9928d2b1c75cce802c55b
| 159
|
py
|
Python
|
bindings/python/__init__.py
|
Christopher22/matryoshka-sqlite
|
9fbd47fb7a60c80167507f5dd2eec840da972bbd
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
bindings/python/__init__.py
|
Christopher22/matryoshka-sqlite
|
9fbd47fb7a60c80167507f5dd2eec840da972bbd
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
bindings/python/__init__.py
|
Christopher22/matryoshka-sqlite
|
9fbd47fb7a60c80167507f5dd2eec840da972bbd
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
from matryoshka import Matryoshka
from status import Status
from exception import MatryoshkaException
from file_system import FileSystem
from file import File
| 26.5
| 41
| 0.874214
| 21
| 159
| 6.571429
| 0.428571
| 0.115942
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125786
| 159
| 5
| 42
| 31.8
| 0.992806
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
508aefc6c19c865a78993e4f617ef7a42b4ca999
| 55
|
py
|
Python
|
bin/intensity_normalization/__init__.py
|
nibill/MIALab-1
|
e3550c962b21d5f0b9cb705e423d3016d294bd8d
|
[
"Apache-2.0"
] | null | null | null |
bin/intensity_normalization/__init__.py
|
nibill/MIALab-1
|
e3550c962b21d5f0b9cb705e423d3016d294bd8d
|
[
"Apache-2.0"
] | null | null | null |
bin/intensity_normalization/__init__.py
|
nibill/MIALab-1
|
e3550c962b21d5f0b9cb705e423d3016d294bd8d
|
[
"Apache-2.0"
] | 1
|
2022-01-31T02:48:02.000Z
|
2022-01-31T02:48:02.000Z
|
from . import errors, normalize, utilities, plot, exec
| 27.5
| 54
| 0.763636
| 7
| 55
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145455
| 55
| 1
| 55
| 55
| 0.893617
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
50c8596e74fd466de5ec4dc4f3593a0f76cf7830
| 1,942
|
py
|
Python
|
tests/TestFileReaderAndWriter.py
|
dabal/MultiRegexpProcessor
|
ee0f2b55e9e8652c44da13c9ceb8b1bd034626d5
|
[
"MIT"
] | null | null | null |
tests/TestFileReaderAndWriter.py
|
dabal/MultiRegexpProcessor
|
ee0f2b55e9e8652c44da13c9ceb8b1bd034626d5
|
[
"MIT"
] | null | null | null |
tests/TestFileReaderAndWriter.py
|
dabal/MultiRegexpProcessor
|
ee0f2b55e9e8652c44da13c9ceb8b1bd034626d5
|
[
"MIT"
] | null | null | null |
import unittest
from FileReaderAndWriter import *
import Ekstraktor
import sys
from io import StringIO
class TestFileReaderAndWriter(unittest.TestCase):
    """Tests for FileReaderAndWriter.perform().

    Feeds a multi-line string through `perform(infile, outfile, ekstraktor,
    integrator)` and compares the produced text with an expected result.
    Judging by the fixtures, lines matching any of the extractor regexps
    (here r'\\d\\d' and r'ala$') are excluded from the output, and the
    integrator controls how the remaining lines are joined — the exact
    Ekstraktor semantics are not visible from this file, so verify against
    the Ekstraktor module.

    NOTE(review): `unicode` is the Python 2 builtin; under Python 3 this
    module raises NameError at test time. Presumably written for Python 2 —
    confirm the target interpreter.
    """

    def test_perform1(self):
        """Non-empty integrator pattern: kept lines separated by CRLF."""
        # Four input lines: the first matches r'\d\d' ("99"), the last
        # matches r'ala$'; only the two middle lines survive.
        testString=unicode("""jakas linia z 99
jakas linia do zapisania1
jakas linia do zapisania2
jakas linia co ma na koncu ala""")
        resultString=unicode("""\r\n jakas linia do zapisania1\r\n jakas linia do zapisania2\r\n""")
        infile=StringIO()
        infile.write(testString)
        infile.seek(0)  # rewind so perform() reads from the beginning
        outfile=StringIO()
        obj=FileReaderAndWriter()
        regexp=[r'\d\d', r'ala$']
        #regexp=[]
        ekstraktor=Ekstraktor.Ekstraktor(regexp)
        integrator=Ekstraktor.Ekstraktor([r'j'])  # every kept line matches r'j'
        outfile=obj.perform(infile, outfile,ekstraktor,integrator)
        outfile.seek(0)
        content = outfile.read()
        #print '******'+content+'*********\r\n'
        self.assertEqual(content,resultString)

    def test_perform2(self):
        """Empty integrator pattern list: kept lines joined on one line."""
        testString=unicode("""jakas linia z 99
jakas linia do zapisania1
jakas linia do zapisania2
jakas linia co ma na koncu ala
jakas linia do zapisania3""")
        # With an empty integrator the surviving lines are concatenated and
        # terminated by a single CRLF (inferred from this fixture).
        resultString=unicode(""" jakas linia do zapisania1 jakas linia do zapisania2 jakas linia do zapisania3\r\n""")
        infile=StringIO()
        infile.write(testString)
        infile.seek(0)
        outfile=StringIO()
        obj=FileReaderAndWriter()
        regexp=[r'\d\d', r'ala$']
        #regexp=[]
        ekstraktor=Ekstraktor.Ekstraktor(regexp)
        integrator=Ekstraktor.Ekstraktor([])
        outfile=obj.perform(infile, outfile,ekstraktor,integrator)
        outfile.seek(0)
        content = outfile.read()
        #print '******'+content+'*********\r\n'
        self.assertEqual(content,resultString)
#python -m unittest tests.TestFileReaderAndWriter
| 37.346154
| 140
| 0.609166
| 204
| 1,942
| 5.789216
| 0.254902
| 0.118544
| 0.101609
| 0.074513
| 0.746825
| 0.723116
| 0.723116
| 0.723116
| 0.723116
| 0.723116
| 0
| 0.014215
| 0.275489
| 1,942
| 51
| 141
| 38.078431
| 0.82516
| 0.07312
| 0
| 0.651163
| 0
| 0
| 0.274083
| 0
| 0
| 0
| 0
| 0
| 0.046512
| 1
| 0.046512
| false
| 0
| 0.116279
| 0
| 0.186047
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
50eae631f67d61d9aaf3fdea78eef11ac2ef69df
| 9,970
|
py
|
Python
|
src/cool_compiler/parser/cool_parser.py
|
CeroMB/cool-compiler-2021
|
00162bb98651af0163c56f5e7f04884098da1812
|
[
"MIT"
] | null | null | null |
src/cool_compiler/parser/cool_parser.py
|
CeroMB/cool-compiler-2021
|
00162bb98651af0163c56f5e7f04884098da1812
|
[
"MIT"
] | null | null | null |
src/cool_compiler/parser/cool_parser.py
|
CeroMB/cool-compiler-2021
|
00162bb98651af0163c56f5e7f04884098da1812
|
[
"MIT"
] | 1
|
2022-03-13T23:05:33.000Z
|
2022-03-13T23:05:33.000Z
|
from sly import Parser
from .__dependency import CoolTokens
from .factory_decored import NodesName
class CoolParser(Parser):
    """SLY LALR(1) parser for the COOL language.

    Grammar rules are declared with SLY's `@_` decorator (injected by the
    `Parser` metaclass; it is not defined in this file). Each rule builds an
    AST node through the injected `factory` callable, keyed by `NodesName`,
    and syntax errors are reported through the injected `errors` collector.
    """
    tokens = CoolTokens.tokens
    start = 'program'
    # Operator precedence, lowest first; ARROW is assignment, LESS_OR is <=.
    precedence = (
        ('right', 'ARROW'),
        ('left','NOT'),
        ('nonassoc', '=','<','LESS_OR'),
        ('left', '+', '-'),
        ('left', '*', '/'),
        ('left', "ISVOID"),
        ('left', '~'),
        ('left', '@'),
        ('left', '.'),
        ('right', 'IN'),
    )

    def __init__(self, factory, errors):
        # factory: callable(node_kind, *children) producing AST nodes.
        # errors: error collector, callable as errors(lineno, index) and
        #         exposing .add_syntactic() / .pos.
        self.factory = factory
        self.cool_error = errors
        # Last erroneous token, used to avoid reporting the same token twice.
        self.lte = None

    def error(self, token):
        """SLY error hook: report the offending token (or EOF) once and
        return the next token so parsing can attempt to resume."""
        tok = next(self.tokens, None)
        # Skip the report if this token was already flagged last time.
        if self.lte is None or not self.lte == token :
            if token is None:
                # EOF: try to locate the last symbol on the parse stack for
                # a line/column; plain terminals may lack .lineno, hence the
                # AttributeError fallback to position (0, 0).
                try:
                    tok = self.symstack[-1]
                    self.cool_error(tok.lineno, tok.index)
                except AttributeError:
                    self.cool_error.pos = (0, 0)
                self.cool_error.add_syntactic(f"ERROR at or near EOF")
                return
            else:
                char = token.value
                self.cool_error(token.lineno, token.index)
                self.cool_error.add_syntactic(f"ERROR at or near {char}")
        self.lte = tok
        return tok

    # ----- program / class structure -----

    @_("")
    def epsilon(self, prod):
        # Empty production used as an explicit "nothing" marker.
        pass

    @_('class_list')
    def program(self, prod):
        return self.factory( NodesName.Program, prod.class_list )

    @_("cclass epsilon")
    def class_list(self, prod):
        return [prod.cclass]

    @_('cclass class_list')
    def class_list(self, prod):
        return [prod.cclass] + prod.class_list

    @_('CLASS TYPE "{" class_feature "}" ";" ')
    def cclass(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        # No INHERITS clause: parent type is None.
        return self.factory( NodesName.Class, prod.TYPE, None, prod.class_feature )

    @_('CLASS TYPE INHERITS TYPE "{" class_feature "}" ";"')
    def cclass(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        # TYPE0 is the class name, TYPE1 its declared parent.
        return self.factory( NodesName.Class, prod.TYPE0, prod.TYPE1, prod.class_feature )

    # ----- class features: attributes and methods -----

    @_('def_atr ";" class_feature')
    def class_feature(self, prod):
        return [prod.def_atr] + prod.class_feature

    @_('def_func ";" class_feature')
    def class_feature(self, prod):
        return [prod.def_func] + prod.class_feature

    @_('epsilon')
    def class_feature(self, prod):
        return []

    @_('ID ":" TYPE')
    def def_atr(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        # Attribute without initializer.
        return self.factory( NodesName.DefAtr, prod.ID, prod.TYPE, None )

    @_('ID ":" TYPE ARROW expr')
    def def_atr(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        return self.factory( NodesName.DefAtr, prod.ID, prod.TYPE, prod.expr )

    @_('ID "(" param_list ")" ":" TYPE "{" expr "}"')
    def def_func(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        return self.factory( NodesName.DefFunc, prod.ID, prod.param_list, prod.TYPE, prod.expr )

    @_('ID "(" ")" ":" TYPE "{" expr "}"')
    def def_func(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        # Parameterless method: empty formal-parameter list.
        return self.factory( NodesName.DefFunc, prod.ID, [], prod.TYPE, prod.expr )

    @_('ID ":" TYPE "," param_list')
    def param_list(self, prod):
        return [( prod.ID, prod.TYPE )] + prod.param_list

    @_('ID ":" TYPE')
    def param_list(self, prod):
        return [( prod.ID, prod.TYPE )]

    # ----- expressions -----

    @_('ID ARROW expr')
    def expr(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        # NOTE(review): "Assing" (sic) matches the factory's node name.
        return self.factory( NodesName.Assing, prod.ID, prod.expr)

    @_('expr "@" TYPE "." ID "(" expr_list ")"', 'expr "@" TYPE "." ID "(" ")"' )
    def expr(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        # The second alternative has no expr_list; accessing it raises
        # AttributeError, which selects the empty-argument form.
        try:
            return self.factory( NodesName.CastingDispatch, prod.expr, prod.TYPE, prod.ID, prod.expr_list)
        except AttributeError:
            return self.factory( NodesName.CastingDispatch, prod.expr, prod.TYPE, prod.ID, [])

    @_('expr "." ID "(" expr_list ")"', 'expr "." ID "(" ")"')
    def expr(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        # NOTE(review): bare `except:` — unlike the sibling rules, this
        # swallows any error from the first factory call, not just the
        # missing expr_list; consider narrowing to AttributeError.
        try:
            return self.factory( NodesName.Dispatch, prod.expr, prod.ID, prod.expr_list)
        except:
            return self.factory( NodesName.Dispatch, prod.expr, prod.ID, [])

    @_('ID "(" expr_list ")"', 'ID "(" ")"')
    def expr(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        try:
            return self.factory( NodesName.StaticDispatch, prod.ID, prod.expr_list )
        except AttributeError:
            return self.factory( NodesName.StaticDispatch, prod.ID, [] )

    @_('expr "," expr_list')
    def expr_list(self, prod):
        return [prod.expr] + prod.expr_list

    @_('expr')
    def expr_list(self, prod):
        return [prod.expr]

    @_('IF expr THEN expr ELSE expr FI')
    def expr(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        return self.factory( NodesName.IfThenElse, prod.expr0, prod.expr1, prod.expr2)

    @_('WHILE expr LOOP expr POOL')
    def expr(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        return self.factory( NodesName.While, prod.expr0, prod.expr1)

    @_('"{" block_list "}"')
    def expr(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        return self.factory( NodesName.Block, prod.block_list)

    @_('expr ";" block_list')
    def block_list(self, prod):
        return [prod.expr] + prod.block_list

    @_('expr ";" epsilon')
    def block_list(self, prod):
        return [prod.expr]

    @_('LET let_list IN expr')
    def expr(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        return self.factory( NodesName.LetIn, prod.let_list, prod.expr)

    @_('let_assign "," let_list')
    def let_list(self, prod):
        return [prod.let_assign] + prod.let_list

    @_('let_assign epsilon')
    def let_list(self, prod):
        return [prod.let_assign]

    @_('ID ":" TYPE ARROW expr')
    def let_assign(self, prod):
        # (name, type, initializer) triple; initializer may be None below.
        return (prod.ID, prod.TYPE, prod.expr)

    @_('ID ":" TYPE')
    def let_assign(self, prod):
        return (prod.ID, prod.TYPE, None)

    @_('CASE expr OF case_list ESAC')
    def expr(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        return self.factory( NodesName.Case, prod.case_list, prod.expr)

    @_('ID ":" TYPE LOGICAR expr ";" case_list')
    def case_list(self, prod):
        return [( prod.ID, prod.TYPE, prod.expr )] + prod.case_list

    @_('ID ":" TYPE LOGICAR expr ";"')
    def case_list(self, prod):
        return [( prod.ID, prod.TYPE, prod.expr )]

    @_('NEW TYPE')
    def expr(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        return self.factory( NodesName.New, prod.TYPE )

    @_('ISVOID expr')
    def expr(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        return self.factory( NodesName.IsVoid, prod.expr )

    # Binary arithmetic and comparison operators.

    @_('expr "+" expr')
    def expr(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        return self.factory( NodesName.Sum, prod.expr0, prod.expr1 )

    @_('expr "-" expr')
    def expr(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        return self.factory( NodesName.Rest, prod.expr0, prod.expr1 )

    @_('expr "*" expr')
    def expr(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        return self.factory( NodesName.Mult, prod.expr0, prod.expr1 )

    @_('expr "/" expr')
    def expr(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        return self.factory( NodesName.Div, prod.expr0, prod.expr1 )

    @_('"~" expr')
    def expr(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        return self.factory( NodesName.Complement, prod.expr )

    @_('expr "<" expr')
    def expr(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        return self.factory( NodesName.Less, prod.expr0, prod.expr1 )

    @_('expr LESS_OR expr')
    def expr(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        return self.factory( NodesName.LessOrEquals, prod.expr0, prod.expr1 )

    @_('expr "=" expr')
    def expr(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        return self.factory( NodesName.Equals, prod.expr0, prod.expr1 )

    @_('NOT expr')
    def expr(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        return self.factory( NodesName.Neg, prod.expr )

    @_('"(" expr ")"')
    def expr(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        # Parentheses only group; no node is created.
        return prod.expr

    # Atomic expressions: identifiers and literals.

    @_("ID")
    def expr(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        return self.factory( NodesName.IdExpr, prod.ID)

    @_("NUMBER")
    def expr(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        return self.factory( NodesName.Int, prod.NUMBER)

    @_("STRING")
    def expr(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        return self.factory( NodesName.Str, prod.STRING)

    @_("TRUE")
    def expr(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        return self.factory( NodesName.Bool, prod.TRUE)

    @_("FALSE")
    def expr(self, prod):
        self.factory.get_pos_to_errors(prod.lineno, prod.index)
        return self.factory( NodesName.Bool, prod.FALSE)
| 34.982456
| 106
| 0.600201
| 1,264
| 9,970
| 4.553006
| 0.089399
| 0.131885
| 0.103388
| 0.158123
| 0.741616
| 0.728063
| 0.716768
| 0.707732
| 0.667767
| 0.64066
| 0
| 0.003228
| 0.254162
| 9,970
| 285
| 107
| 34.982456
| 0.77071
| 0
| 0
| 0.407725
| 0
| 0
| 0.110119
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.227468
| false
| 0.004292
| 0.012876
| 0.077253
| 0.493562
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
50f65f0560eece8cf6f59f0745cdd51db44327f3
| 9,041
|
py
|
Python
|
h0rton/tests/test_h0_inference/test_gaussian_bnn_posterior_cpu.py
|
jiwoncpark/h0rton
|
2541885d70d090fdb777339cfb77a3a9f3e7996d
|
[
"MIT"
] | 4
|
2020-12-02T02:18:08.000Z
|
2021-11-25T21:56:33.000Z
|
h0rton/tests/test_h0_inference/test_gaussian_bnn_posterior_cpu.py
|
jiwoncpark/h0rton
|
2541885d70d090fdb777339cfb77a3a9f3e7996d
|
[
"MIT"
] | 25
|
2019-10-17T08:18:38.000Z
|
2020-12-26T09:38:05.000Z
|
h0rton/tests/test_h0_inference/test_gaussian_bnn_posterior_cpu.py
|
jiwoncpark/h0rton
|
2541885d70d090fdb777339cfb77a3a9f3e7996d
|
[
"MIT"
] | 1
|
2020-12-03T02:14:12.000Z
|
2020-12-03T02:14:12.000Z
|
import unittest
import numpy as np
import torch
from h0rton.h0_inference.gaussian_bnn_posterior_cpu import DiagonalGaussianBNNPosteriorCPU, LowRankGaussianBNNPosteriorCPU, DoubleLowRankGaussianBNNPosteriorCPU, FullRankGaussianBNNPosteriorCPU, DoubleGaussianBNNPosteriorCPU
from h0rton.h0_inference.gaussian_bnn_posterior_cpu import sigmoid
class TestGaussianBNNPosteriorCPU(unittest.TestCase):
    """A suite of tests verifying that the input PDFs and the sample distributions
    match.

    Each test builds the flattened network prediction vector `pred` expected
    by `set_sliced_pred`, draws 10**7 samples from the posterior object, and
    compares empirical mean/covariance against the analytic values to 2
    decimal places.
    """

    def test_diagonal_gaussian_bnn_posterior_cpu(self):
        """Test the sampling of `DiagonalGaussianBNNPosteriorCPU`
        """
        Y_dim = 2
        batch_size = 3
        rank = 2  # NOTE(review): unused in the diagonal case
        sample_seed = 1113
        mu = np.random.randn(batch_size, Y_dim)
        logvar = np.abs(np.random.randn(batch_size, Y_dim))
        # Prediction layout for the diagonal posterior: [mu | logvar].
        pred = np.concatenate([mu, logvar], axis=1)
        # Get h0rton samples
        #Y_mean = np.random.randn(batch_size, Y_dim)
        #Y_std = np.abs(np.random.randn(batch_size, Y_dim))
        # Identity whitening so samples are compared in the original space.
        Y_mean = np.zeros(Y_dim)
        Y_std = np.ones(Y_dim)
        diagonal_bnn_post = DiagonalGaussianBNNPosteriorCPU(Y_dim, Y_mean, Y_std)
        diagonal_bnn_post.set_sliced_pred(pred)
        h0rton_samples = diagonal_bnn_post.sample(10**7, sample_seed)
        # Get h0rton summary stats
        h0rton_mean = np.mean(h0rton_samples, axis=1)
        h0rton_covmat = np.zeros((batch_size, Y_dim, Y_dim))
        exp_covmat = np.zeros((batch_size, Y_dim, Y_dim))
        for b in range(batch_size):
            # Samples are (batch, n_samples, Y_dim); np.cov wants variables
            # on rows, hence the swapaxes. ddof=0 for the population estimate.
            cov_b = np.cov(h0rton_samples[b, :, :].swapaxes(0, 1), ddof=0)
            h0rton_covmat[b, :, :] = cov_b
            # Analytic covariance of a diagonal Gaussian: diag(exp(logvar)).
            exp_covmat[b, :, :] += np.diagflat(np.exp(logvar[b, :]))
        # Get expected summary stats
        exp_mean = mu
        np.testing.assert_array_almost_equal(h0rton_mean, exp_mean, decimal=2)
        np.testing.assert_array_almost_equal(h0rton_covmat, exp_covmat, decimal=2)

    def test_low_rank_gaussian_bnn_posterior_cpu(self):
        """Test the sampling of `LowRankGaussianBNNPosteriorCPU`
        """
        Y_dim = 2
        batch_size = 3
        rank = 2
        sample_seed = 1113
        mu = np.random.randn(batch_size, Y_dim)
        logvar = np.abs(np.random.randn(batch_size, Y_dim))
        # Low-rank factor F; covariance is F F^T + diag(exp(logvar)).
        F = np.random.randn(batch_size, Y_dim*rank)
        F_unraveled = F.reshape(batch_size, Y_dim, rank)
        FFT = np.matmul(F_unraveled, np.swapaxes(F_unraveled, 1, 2))
        pred = np.concatenate([mu, logvar, F], axis=1)
        # Get h0rton samples
        #Y_mean = np.random.randn(batch_size, Y_dim)
        #Y_std = np.abs(np.random.randn(batch_size, Y_dim))
        Y_mean = np.zeros(Y_dim)
        Y_std = np.ones(Y_dim)
        low_rank_bnn_post = LowRankGaussianBNNPosteriorCPU(Y_dim, Y_mean, Y_std)
        low_rank_bnn_post.set_sliced_pred(pred)
        h0rton_samples = low_rank_bnn_post.sample(10**7, sample_seed)
        #import matplotlib.pyplot as plt
        #plt.hist(h0rton_samples[0, :, 0], bins=30)
        #plt.axvline(mu[0, 0], color='r')
        #plt.show()
        # Get h0rton summary stats
        h0rton_mean = np.mean(h0rton_samples, axis=1)
        h0rton_covmat = np.empty((batch_size, Y_dim, Y_dim))
        # NOTE(review): this aliases FFT, and the in-place += below mutates
        # FFT itself — harmless here since FFT is not reused afterwards.
        exp_covmat = FFT
        for b in range(batch_size):
            cov_b = np.cov(h0rton_samples[b, :, :].swapaxes(0, 1), ddof=0)
            h0rton_covmat[b, :, :] = cov_b
            exp_covmat[b, :, :] += np.diagflat(np.exp(logvar[b, :]))
        # Get expected summary stats
        exp_mean = mu
        np.testing.assert_array_almost_equal(h0rton_mean, exp_mean, decimal=2)
        np.testing.assert_array_almost_equal(h0rton_covmat, exp_covmat, decimal=2)

    def test_double_low_rank_gaussian_bnn_posterior_cpu(self):
        """Test the sampling of `DoubleLowRankGaussianBNNPosteriorCPU`
        Note
        ----
        Only compares the true and sample means
        """
        Y_dim = 2
        batch_size = 3
        rank = 2
        sample_seed = 1113
        # First gaussian
        mu = np.random.randn(batch_size, Y_dim)
        logvar = np.abs(np.random.randn(batch_size, Y_dim))
        F = np.random.randn(batch_size, Y_dim*rank)
        F_unraveled = F.reshape(batch_size, Y_dim, rank)
        FFT = np.matmul(F_unraveled, np.swapaxes(F_unraveled, 1, 2))
        # Second gaussian
        mu2 = np.random.randn(batch_size, Y_dim)
        logvar2 = np.abs(np.random.randn(batch_size, Y_dim))
        F2 = np.random.randn(batch_size, Y_dim*rank)
        F2_unraveled = F2.reshape(batch_size, Y_dim, rank)
        FFT2 = np.matmul(F2_unraveled, np.swapaxes(F2_unraveled, 1, 2))
        # Mixture logit; the weight of the second component is 0.5*sigmoid(alpha).
        alpha = np.random.randn(batch_size, 1)
        pred = np.concatenate([mu, logvar, F, mu2, logvar2, F2, alpha], axis=1)
        # Get h0rton samples
        #Y_mean = np.random.randn(batch_size, Y_dim)
        #Y_std = np.abs(np.random.randn(batch_size, Y_dim))
        Y_mean = np.zeros(Y_dim)
        Y_std = np.ones(Y_dim)
        double_bnn_post = DoubleLowRankGaussianBNNPosteriorCPU(Y_dim, Y_mean, Y_std)
        double_bnn_post.set_sliced_pred(pred,)
        h0rton_samples = double_bnn_post.sample(10**7, sample_seed)
        # Get h0rton summary stats
        h0rton_mean = np.mean(h0rton_samples, axis=1)
        # Get expected summary stats
        # Mixture mean = w1*mu + w2*mu2 with w2 = 0.5*sigmoid(alpha).
        w2 = 0.5*sigmoid(alpha)
        w1 = 1.0 - w2
        exp_mean = mu*w1 + mu2*w2
        np.testing.assert_array_almost_equal(h0rton_mean, exp_mean, decimal=2)

    def test_full_rank_gaussian_bnn_posterior_cpu(self):
        """Test the sampling of `FullRankGaussianBNNPosteriorCPU`
        """
        Y_dim = 2
        batch_size = 3
        # Lower-triangular index pattern used to unpack the Cholesky factor.
        tril_idx = np.tril_indices(Y_dim)
        tril_len = len(tril_idx[0])
        sample_seed = 1113
        # Get h0rton evaluation
        mu = np.random.randn(batch_size, Y_dim)
        tril_elements = np.random.randn(batch_size, tril_len)
        pred = np.concatenate([mu, tril_elements], axis=1).astype(np.float32)
        # Get h0rton samples
        #Y_mean = np.random.randn(batch_size, Y_dim)
        #Y_std = np.abs(np.random.randn(batch_size, Y_dim))
        Y_mean = np.zeros(Y_dim)
        Y_std = np.ones(Y_dim)
        post = FullRankGaussianBNNPosteriorCPU(Y_dim, Y_mean, Y_std)
        post.set_sliced_pred(pred)
        h0rton_samples = post.sample(10**7, sample_seed)
        #import matplotlib.pyplot as plt
        #plt.hist(h0rton_samples[0, :, 0], bins=30)
        #plt.axvline(mu[0, 0], color='r')
        #plt.show()
        # Get h0rton summary stats
        h0rton_mean = np.mean(h0rton_samples, axis=1)
        h0rton_covmat = np.empty((batch_size, Y_dim, Y_dim))
        np_covmat = np.empty((batch_size, Y_dim, Y_dim))
        for b in range(batch_size):
            # Cov mat calculated from H0rton samples
            cov_b = np.cov(h0rton_samples[b, :, :].swapaxes(0, 1), ddof=0)
            h0rton_covmat[b, :, :] = cov_b
            # Cov mat expected from the PDF
            # Rebuild the Cholesky factor: diagonal entries are stored as
            # logs, so exponentiate them before forming the precision matrix.
            tril = np.zeros((Y_dim, Y_dim))
            tril[tril_idx[0], tril_idx[1]] = tril_elements[b, :]
            log_diag_tril = np.diagonal(tril, offset=0, axis1=0, axis2=1).copy()
            tril[np.eye(Y_dim).astype(bool)] = np.exp(log_diag_tril)
            # Precision = L L^T, covariance = its inverse.
            prec_mat = np.dot(tril, tril.T)
            cov_mat = np.linalg.inv(prec_mat)
            np_covmat[b, :, :] = cov_mat
        # Get expected summary stats
        np_mean = mu
        np.testing.assert_array_almost_equal(h0rton_mean, np_mean, decimal=2)
        np.testing.assert_array_almost_equal(h0rton_covmat, np_covmat, decimal=2)

    def test_double_gaussian_bnn_posterior_cpu(self):
        """Test the sampling of `DoubleGaussianBNNPosteriorCPU`
        Note
        ----
        Only compares the true and sample means
        """
        Y_dim = 2
        batch_size = 3
        tril_idx = np.tril_indices(Y_dim)
        tril_len = len(tril_idx[0])
        sample_seed = 1113
        # Get h0rton evaluation
        # First gaussian
        mu = np.random.randn(batch_size, Y_dim)
        tril_elements = np.random.randn(batch_size, tril_len)
        # Second gaussian
        mu2 = np.random.randn(batch_size, Y_dim)
        tril_elements2 = np.random.randn(batch_size, tril_len)
        alpha = np.random.randn(batch_size, 1)
        pred = np.concatenate([mu, tril_elements, mu2, tril_elements2, alpha], axis=1)
        # Get h0rton samples
        #Y_mean = np.random.randn(batch_size, Y_dim)
        #Y_std = np.abs(np.random.randn(batch_size, Y_dim))
        Y_mean = np.zeros(Y_dim)
        Y_std = np.ones(Y_dim)
        post = DoubleGaussianBNNPosteriorCPU(Y_dim, Y_mean, Y_std)
        post.set_sliced_pred(pred,)
        h0rton_samples = post.sample(10**7, sample_seed)
        # Get h0rton summary stats
        h0rton_mean = np.mean(h0rton_samples, axis=1)
        # Get expected summary stats
        w2 = 0.5*sigmoid(alpha)
        w1 = 1.0 - w2
        np_mean = mu*w1 + mu2*w2
        np.testing.assert_array_almost_equal(h0rton_mean, np_mean, decimal=2)
# Allow the suite to be executed directly as a script.
if __name__ == "__main__":
    unittest.main()
| 42.848341
| 224
| 0.635439
| 1,278
| 9,041
| 4.226917
| 0.118936
| 0.045909
| 0.059237
| 0.077009
| 0.783599
| 0.776749
| 0.744909
| 0.738986
| 0.709922
| 0.660496
| 0
| 0.028584
| 0.257051
| 9,041
| 210
| 225
| 43.052381
| 0.775644
| 0.189913
| 0
| 0.625
| 0
| 0
| 0.001117
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 1
| 0.036765
| false
| 0
| 0.036765
| 0
| 0.080882
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0fa38952d658c9dd1b11270529a86bee417e71e4
| 178
|
py
|
Python
|
pygeop/geom3d/__init__.py
|
tatsy/pygeop
|
fa11fb11617437464e6299a2d36f15bc7b9bf53a
|
[
"MIT"
] | null | null | null |
pygeop/geom3d/__init__.py
|
tatsy/pygeop
|
fa11fb11617437464e6299a2d36f15bc7b9bf53a
|
[
"MIT"
] | null | null | null |
pygeop/geom3d/__init__.py
|
tatsy/pygeop
|
fa11fb11617437464e6299a2d36f15bc7b9bf53a
|
[
"MIT"
] | null | null | null |
from .face import *
from .halfedge import *
from .simplify import simplify
from .trimesh import *
from .vector import *
from .vertex import *
from .objmesh import ObjMesh
| 22.25
| 31
| 0.735955
| 23
| 178
| 5.695652
| 0.391304
| 0.381679
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.196629
| 178
| 7
| 32
| 25.428571
| 0.916084
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
0fbbf0dc8470848b93b61ee2eb6b8db778847710
| 1,612
|
py
|
Python
|
search/count_elem.py
|
Ratione/Python-Algroithms
|
3485d63e47ff635b8ca44f90400167c516bcbd6d
|
[
"MIT"
] | 5
|
2018-05-09T04:02:04.000Z
|
2021-02-21T19:27:56.000Z
|
search/count_elem.py
|
Ratione/Python-Algroithms
|
3485d63e47ff635b8ca44f90400167c516bcbd6d
|
[
"MIT"
] | null | null | null |
search/count_elem.py
|
Ratione/Python-Algroithms
|
3485d63e47ff635b8ca44f90400167c516bcbd6d
|
[
"MIT"
] | 5
|
2018-02-23T22:08:28.000Z
|
2020-08-19T08:31:47.000Z
|
def count_elem(array, query):
    """Count how many times `query` occurs in the sorted list `array`.

    Uses two binary searches (first and last occurrence), so the whole
    count runs in O(log n) regardless of how many duplicates there are.

    Parameters:
        array: list sorted in non-decreasing order.
        query: value to count.

    Returns:
        The number of occurrences, or None if `query` is absent
        (preserves the original API, which returned None rather than 0).
    """
    def first_occurance(array, query):
        # Binary search for the leftmost index holding `query`.
        lo, hi = 0, len(array) - 1
        while lo <= hi:
            mid = lo + (hi - lo) // 2
            if (array[mid] == query and mid == 0) or \
                    (array[mid] == query and array[mid-1] < query):
                return mid
            elif array[mid] < query:
                # BUGFIX: was `array[mid] <= query`, which moved the search
                # RIGHT past earlier duplicates when array[mid] == query but
                # was not the first occurrence (e.g. count_elem([3,3,3], 3)
                # returned None). On equality we must search left instead.
                lo = mid + 1
            else:
                hi = mid - 1
        return None  # implicit in the original; made explicit

    def last_occurance(array, query):
        # Binary search for the rightmost index holding `query`.
        lo, hi = 0, len(array) - 1
        while lo <= hi:
            mid = lo + (hi - lo) // 2
            if (array[mid] == query and mid == len(array) - 1) or \
                    (array[mid] == query and array[mid+1] > query):
                return mid
            elif array[mid] <= query:
                # `<=` is correct here: on equality (not last occurrence)
                # the rightmost match lies to the right.
                lo = mid + 1
            else:
                hi = mid - 1
        return None

    first = first_occurance(array, query)
    last = last_occurance(array, query)
    if first is None or last is None:
        return None
    return last - first + 1
# Demo: count several queries (present, absent, and boundary values)
# against the same sorted array with duplicates.
array = [1, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 6, 6, 6]
print(array)
for query in (3, 5, 7, 1, -1, 9):
    print("-----COUNT-----")
    print("count: ", query, " :", count_elem(array, query))
| 30.415094
| 67
| 0.488834
| 210
| 1,612
| 3.7
| 0.147619
| 0.15444
| 0.23166
| 0.171171
| 0.740026
| 0.740026
| 0.740026
| 0.696268
| 0.696268
| 0.696268
| 0
| 0.031732
| 0.315757
| 1,612
| 52
| 68
| 31
| 0.672711
| 0
| 0
| 0.583333
| 0
| 0
| 0.08933
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0625
| false
| 0
| 0
| 0
| 0.145833
| 0.270833
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0fd7cc5cb9b9688b1eaa7010b5611fd321026e46
| 179
|
py
|
Python
|
src/app/integrations/zoomus/__init__.py
|
denkasyanov/education-backend
|
c796b6f2f1cc1cd09f83cab2ca0cc45344906ef5
|
[
"MIT"
] | 62
|
2021-09-22T18:38:26.000Z
|
2022-03-29T06:09:42.000Z
|
src/app/integrations/zoomus/__init__.py
|
denkasyanov/education-backend
|
c796b6f2f1cc1cd09f83cab2ca0cc45344906ef5
|
[
"MIT"
] | 50
|
2021-09-16T07:17:31.000Z
|
2022-03-26T12:06:58.000Z
|
src/app/integrations/zoomus/__init__.py
|
denkasyanov/education-backend
|
c796b6f2f1cc1cd09f83cab2ca0cc45344906ef5
|
[
"MIT"
] | 16
|
2021-10-17T17:43:31.000Z
|
2022-03-26T11:22:45.000Z
|
from app.integrations.zoomus.client import ZoomusClient
from app.integrations.zoomus.http import ZoomusHTTPException
# Public surface of the zoomus integration package.
__all__ = ['ZoomusClient', 'ZoomusHTTPException']
| 22.375
| 60
| 0.787709
| 17
| 179
| 8.058824
| 0.588235
| 0.10219
| 0.277372
| 0.364964
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128492
| 179
| 7
| 61
| 25.571429
| 0.878205
| 0
| 0
| 0
| 0
| 0
| 0.173184
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
ba0cbabf0514c6a67e64f2605ece0de4c0fcd080
| 61
|
py
|
Python
|
test3.py
|
zo-wa/hema
|
670101ba8619a6851cd3de7a2f39776517ee4ea5
|
[
"Apache-2.0"
] | null | null | null |
test3.py
|
zo-wa/hema
|
670101ba8619a6851cd3de7a2f39776517ee4ea5
|
[
"Apache-2.0"
] | null | null | null |
test3.py
|
zo-wa/hema
|
670101ba8619a6851cd3de7a2f39776517ee4ea5
|
[
"Apache-2.0"
] | null | null | null |
# Emit the three fixed diagnostic lines this script has always printed.
for _line in ('this is test3', 'add=====', 'add=====2'):
    print(_line)
| 12.2
| 22
| 0.540984
| 9
| 61
| 3.666667
| 0.666667
| 0.484848
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.036364
| 0.098361
| 61
| 5
| 23
| 12.2
| 0.563636
| 0
| 0
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
ba10c9bb288a30b8a3c24de98614343d2e778135
| 145
|
py
|
Python
|
src/sorted/0378.kth-smallest-element-in-a-sorted-matrix/kth-smallest-element-in-a-sorted-matrix.py
|
lyphui/Just-Code
|
e0c3c3ecb67cb805080ff686e88522b2bffe7741
|
[
"MIT"
] | 782
|
2019-11-19T08:20:49.000Z
|
2022-03-25T06:59:09.000Z
|
src/0378.kth-smallest-element-in-a-sorted-matrix/kth-smallest-element-in-a-sorted-matrix.py
|
Heitao5200/Just-Code
|
5bb3ee485a103418e693b7ec8e26dc84f3691c79
|
[
"MIT"
] | 1
|
2021-03-04T12:21:01.000Z
|
2021-03-05T01:23:54.000Z
|
src/0378.kth-smallest-element-in-a-sorted-matrix/kth-smallest-element-in-a-sorted-matrix.py
|
Heitao5200/Just-Code
|
5bb3ee485a103418e693b7ec8e26dc84f3691c79
|
[
"MIT"
] | 155
|
2019-11-20T08:20:42.000Z
|
2022-03-19T07:28:09.000Z
|
class Solution:
    def kthSmallest(self, matrix: List[List[int]], k: int) -> int:
        """Return the k-th smallest element (1-indexed) of the matrix.

        Flattens every row into one list, sorts it, and indexes directly —
        O(n^2 log n) for an n x n matrix, identical to the original one-liner.
        """
        flattened = [value for row in matrix for value in row]
        flattened.sort()
        return flattened[k - 1]
| 48.333333
| 66
| 0.641379
| 25
| 145
| 3.72
| 0.64
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008772
| 0.213793
| 145
| 3
| 67
| 48.333333
| 0.807018
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
ba20d7f811b8b4b990005fc894ca59bc548e205c
| 5,913
|
py
|
Python
|
samples/client/petstore/python-experimental/test/test_format_test.py
|
kingdun3284/openapi-generator
|
07d145828f33e9e2d9e5be6694bfb1d29db4fe07
|
[
"Apache-2.0"
] | 3
|
2019-06-25T01:41:08.000Z
|
2019-10-01T15:49:15.000Z
|
samples/client/petstore/python-experimental/test/test_format_test.py
|
kingdun3284/openapi-generator
|
07d145828f33e9e2d9e5be6694bfb1d29db4fe07
|
[
"Apache-2.0"
] | 1
|
2022-02-27T20:01:11.000Z
|
2022-02-27T20:01:11.000Z
|
samples/client/petstore/python-experimental/test/test_format_test.py
|
kingdun3284/openapi-generator
|
07d145828f33e9e2d9e5be6694bfb1d29db4fe07
|
[
"Apache-2.0"
] | 2
|
2019-06-25T05:39:52.000Z
|
2019-07-09T15:47:02.000Z
|
# coding: utf-8
"""
OpenAPI Petstore
This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\ # noqa: E501
OpenAPI spec version: 1.0.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import petstore_api
from petstore_api.models.format_test import FormatTest # noqa: E501
from petstore_api import ApiValueError
class TestFormatTest(unittest.TestCase):
    """FormatTest unit test stubs.

    Exercises the generated ``FormatTest`` model's validation rules:
    numeric inclusive bounds, string length bounds, and the regex
    constraint on the ``string`` field.
    """

    def setUp(self):
        # Smallest argument set satisfying FormatTest's required fields;
        # each test adds the field under test on top of these.
        self.required_named_args = dict(
            number=40.1,
            byte='what',
            date='2019-03-23',
            password='rainbowtable'
        )

    def _check_numeric_bounds(self, var_name):
        """Assert *var_name* rejects values one step outside its inclusive
        bounds and accepts the bound values themselves."""
        validations = FormatTest.validations[(var_name,)]
        keyword_args = dict(self.required_named_args)
        for key, adder in (('inclusive_maximum', 1), ('inclusive_minimum', -1)):
            # value outside the bounds throws an error
            with self.assertRaises(ApiValueError):
                keyword_args[var_name] = validations[key] + adder
                FormatTest(**keyword_args)
            # value exactly at the bound works
            keyword_args[var_name] = validations[key]
            self.assertEqual(
                getattr(FormatTest(**keyword_args), var_name),
                validations[key])

    def _check_length_bounds(self, var_name):
        """Assert *var_name* rejects strings one character outside its
        length bounds and accepts strings exactly at the bounds."""
        validations = FormatTest.validations[(var_name,)]
        keyword_args = dict(self.required_named_args)
        for key, adder in (('max_length', 1), ('min_length', -1)):
            # string one character outside the bounds throws an error
            with self.assertRaises(ApiValueError):
                keyword_args[var_name] = 'a' * (validations[key] + adder)
                FormatTest(**keyword_args)
            # string exactly at the bound works
            keyword_args[var_name] = 'a' * validations[key]
            self.assertEqual(
                getattr(FormatTest(**keyword_args), var_name),
                'a' * validations[key])

    def test_integer(self):
        self._check_numeric_bounds('integer')

    def test_int32(self):
        self._check_numeric_bounds('int32')

    def test_number(self):
        self._check_numeric_bounds('number')

    def test_float(self):
        self._check_numeric_bounds('float')

    def test_double(self):
        self._check_numeric_bounds('double')

    def test_password(self):
        self._check_length_bounds('password')

    def test_string(self):
        var_name = 'string'
        keyword_args = dict(self.required_named_args)
        # Values violating the field's regex pattern must be rejected.
        values_invalid = ['abc3', '1', '.', ' ', 'مرحبا', '']
        for value_invalid in values_invalid:
            with self.assertRaises(ApiValueError):
                keyword_args[var_name] = value_invalid
                FormatTest(**keyword_args)
        # A pattern-conforming value is accepted and stored unchanged.
        value_valid = 'abcdz'
        keyword_args[var_name] = value_valid
        self.assertEqual(
            getattr(FormatTest(**keyword_args), var_name), value_valid)
if __name__ == '__main__':
unittest.main()
| 38.901316
| 174
| 0.615593
| 641
| 5,913
| 5.429017
| 0.174727
| 0.132759
| 0.084483
| 0.108621
| 0.751724
| 0.750287
| 0.739368
| 0.722701
| 0.708046
| 0.692241
| 0
| 0.009242
| 0.286318
| 5,913
| 152
| 175
| 38.901316
| 0.815403
| 0.133097
| 0
| 0.632075
| 0
| 0
| 0.056485
| 0
| 0
| 0
| 0
| 0
| 0.132075
| 1
| 0.075472
| false
| 0.028302
| 0.04717
| 0
| 0.132075
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e8500dd0656ae7f804a26a70558221c761701103
| 109
|
py
|
Python
|
pytestsalt/salt/coverage/sitecustomize.py
|
dwoz/pytest-salt
|
9a6af1060cb087d201e842482b1dece317147cbb
|
[
"Apache-2.0"
] | 13
|
2017-04-12T00:20:15.000Z
|
2022-02-01T13:26:22.000Z
|
pytestsalt/salt/coverage/sitecustomize.py
|
dwoz/pytest-salt
|
9a6af1060cb087d201e842482b1dece317147cbb
|
[
"Apache-2.0"
] | 14
|
2017-03-10T10:39:56.000Z
|
2020-09-20T13:38:59.000Z
|
pytestsalt/salt/coverage/sitecustomize.py
|
dwoz/pytest-salt
|
9a6af1060cb087d201e842482b1dece317147cbb
|
[
"Apache-2.0"
] | 14
|
2017-03-01T17:21:23.000Z
|
2019-08-15T13:55:57.000Z
|
# -*- coding: utf-8 -*-
"""Site customization hook: start subprocess coverage measurement.

If the ``coverage`` package is importable, begin collecting data as soon
as the interpreter starts; when it is absent this hook is a no-op.
"""
try:
    import coverage
except ImportError:
    # coverage is optional; without it this hook does nothing.
    pass
else:
    # Kept outside the try block so that an error raised *inside*
    # process_startup() is not silently swallowed as an ImportError.
    coverage.process_startup()
| 15.571429
| 30
| 0.651376
| 12
| 109
| 5.833333
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011765
| 0.220183
| 109
| 6
| 31
| 18.166667
| 0.811765
| 0.192661
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
e87cef6dcf8482a242b06576a7d603fcaa190565
| 110
|
py
|
Python
|
python_library/StarKiller/StarKiller/integration/__init__.py
|
yut23/Microphysics
|
3c4985213c5e5b1ad2602b0bba2ce164b847361a
|
[
"BSD-3-Clause"
] | 16
|
2017-08-17T11:12:01.000Z
|
2021-06-10T23:11:08.000Z
|
python_library/StarKiller/StarKiller/integration/__init__.py
|
Youhichka/Microphysics
|
6f28333d40c9e15fdfbb1c4dc208e887fb5549c3
|
[
"BSD-3-Clause"
] | 533
|
2017-06-08T13:52:11.000Z
|
2022-01-28T16:13:29.000Z
|
python_library/StarKiller/StarKiller/integration/__init__.py
|
Youhichka/Microphysics
|
6f28333d40c9e15fdfbb1c4dc208e887fb5549c3
|
[
"BSD-3-Clause"
] | 34
|
2017-08-16T16:29:20.000Z
|
2021-09-09T16:19:15.000Z
|
from .sdc import SDCOde
from .integration import Integrator
from .numerical_jacobian import NumericalJacobian
| 27.5
| 49
| 0.863636
| 13
| 110
| 7.230769
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109091
| 110
| 3
| 50
| 36.666667
| 0.959184
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e8d5da2e9579adc75953fb846ceab915e25d58ce
| 173
|
py
|
Python
|
python/packages/isce3/core/Pegtrans.py
|
piyushrpt/isce3
|
1741af321470cb5939693459765d11a19c5c6fc2
|
[
"Apache-2.0"
] | null | null | null |
python/packages/isce3/core/Pegtrans.py
|
piyushrpt/isce3
|
1741af321470cb5939693459765d11a19c5c6fc2
|
[
"Apache-2.0"
] | null | null | null |
python/packages/isce3/core/Pegtrans.py
|
piyushrpt/isce3
|
1741af321470cb5939693459765d11a19c5c6fc2
|
[
"Apache-2.0"
] | null | null | null |
#-*- coding: utf-8 -*-
# Import the extension
from .. import isceextension
class Pegtrans(isceextension.pyPegtrans):
    """
    Wrapper for pyPegtrans.

    Thin Python-level subclass of the extension class
    ``isceextension.pyPegtrans``; it adds no behavior of its own and
    exists so pure-Python code can reference/extend it conveniently.
    """
    pass
| 14.416667
| 41
| 0.641618
| 17
| 173
| 6.529412
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007463
| 0.225434
| 173
| 11
| 42
| 15.727273
| 0.820896
| 0.381503
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
2ce3c2046a9218e7d6952901a32a26a6574ddac3
| 52
|
py
|
Python
|
kivymd/uix/filemanager/__init__.py
|
marvelous-benji/KivyMD
|
4ab8dd339902597eaa9f8a4f9a80d8a6eb7d6053
|
[
"MIT"
] | 1,111
|
2015-07-15T02:31:09.000Z
|
2022-03-29T17:22:02.000Z
|
kivymd/uix/filemanager/__init__.py
|
marvelous-benji/KivyMD
|
4ab8dd339902597eaa9f8a4f9a80d8a6eb7d6053
|
[
"MIT"
] | 706
|
2015-06-10T22:24:13.000Z
|
2022-03-31T16:22:39.000Z
|
kivymd/uix/filemanager/__init__.py
|
marvelous-benji/KivyMD
|
4ab8dd339902597eaa9f8a4f9a80d8a6eb7d6053
|
[
"MIT"
] | 561
|
2015-07-15T04:57:23.000Z
|
2022-03-31T17:14:31.000Z
|
from .filemanager import MDFileManager # NOQA F401
| 26
| 51
| 0.807692
| 6
| 52
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068182
| 0.153846
| 52
| 1
| 52
| 52
| 0.886364
| 0.173077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
2cf33f6293b5f8367f3d13d776456bf641818e3d
| 144
|
py
|
Python
|
dstr_common_lib/util.py
|
hbolzan/sql-to-rest-services-common
|
f2027139b3875e1db207cd9d4506b2a8f4dec4df
|
[
"BSD-2-Clause"
] | null | null | null |
dstr_common_lib/util.py
|
hbolzan/sql-to-rest-services-common
|
f2027139b3875e1db207cd9d4506b2a8f4dec4df
|
[
"BSD-2-Clause"
] | null | null | null |
dstr_common_lib/util.py
|
hbolzan/sql-to-rest-services-common
|
f2027139b3875e1db207cd9d4506b2a8f4dec4df
|
[
"BSD-2-Clause"
] | null | null | null |
import re
def clear_punctuation(document):
    """Strip every non-digit character from *document*.

    NOTE(review): despite the name, this removes *all* non-digit
    characters (letters, whitespace and punctuation alike), keeping only
    the digits — the regex is ``\\D`` (non-digit), not a punctuation class.

    Args:
        document: Any value; it is converted with ``str()`` first.

    Returns:
        str: The digits of ``document`` concatenated in original order.
    """
    # Raw string avoids the invalid-escape SyntaxWarning that the
    # original non-raw '\D' triggers on modern Python.
    return re.sub(r'\D', '', str(document))
| 20.571429
| 55
| 0.680556
| 18
| 144
| 5.388889
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 144
| 6
| 56
| 24
| 0.808333
| 0.3125
| 0
| 0
| 0
| 0
| 0.021505
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fa08a520bb7de4bf932a4bfe1635c5dcfc3c8476
| 51
|
py
|
Python
|
authApp/models/__init__.py
|
xlausae/4a-docs
|
e2d1038153f170d32d8edbe5ac3fe616ef554206
|
[
"MIT"
] | 1
|
2021-11-29T14:17:07.000Z
|
2021-11-29T14:17:07.000Z
|
authApp/models/__init__.py
|
xlausae/4a-docs
|
e2d1038153f170d32d8edbe5ac3fe616ef554206
|
[
"MIT"
] | 2
|
2021-11-18T17:09:21.000Z
|
2021-11-19T21:59:11.000Z
|
authApp/models/__init__.py
|
xlausae/4a-docs
|
e2d1038153f170d32d8edbe5ac3fe616ef554206
|
[
"MIT"
] | 1
|
2021-11-18T03:19:28.000Z
|
2021-11-18T03:19:28.000Z
|
from .user import User
from .product import Product
| 25.5
| 28
| 0.823529
| 8
| 51
| 5.25
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137255
| 51
| 2
| 28
| 25.5
| 0.954545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fa2bfbc39f8d6c85001f515f000874efd7f31d58
| 726
|
py
|
Python
|
tspreprocess/denoise/denoise.py
|
MaxBenChrist/tspreprocess
|
08695c287227855732e3d2213134f9e65649ac70
|
[
"MIT"
] | 61
|
2017-08-01T19:58:59.000Z
|
2022-03-13T08:10:12.000Z
|
tspreprocess/denoise/denoise.py
|
afcarl/tspreprocess
|
08695c287227855732e3d2213134f9e65649ac70
|
[
"MIT"
] | 9
|
2017-08-08T13:13:17.000Z
|
2021-01-10T23:23:46.000Z
|
tspreprocess/denoise/denoise.py
|
afcarl/tspreprocess
|
08695c287227855732e3d2213134f9e65649ac70
|
[
"MIT"
] | 9
|
2018-03-17T06:09:00.000Z
|
2021-01-10T23:24:07.000Z
|
# -*- coding: utf-8 -*-
# This file as well as the whole tspreprocess package are licenced under the MIT licence (see the LICENCE.txt)
# Maximilian Christ (maximilianchrist.com), 2017
from __future__ import absolute_import, division
def make_stationary():
    """
    Takes a time series container ts and makes sure that all time series are stationary,

    NOTE(review): placeholder only — no parameters are accepted yet and
    nothing is computed; kept so the public API name is stable.
    """
    pass
def remove_outlier_sigma():
    """
    Removes outlier that are more than r sigma from the normal distribution

    NOTE(review): placeholder only — no parameters are accepted yet and
    nothing is computed; kept so the public API name is stable.
    """
    pass
def apply_kalman_filtering():
    """
    Applys a kalman filter to the time series.

    NOTE(review): placeholder only — nothing is implemented yet.
    """
    # todo: check https://github.com/pykalman/pykalman
    # todo: maybe this is more transformation than denoising
    pass
| 23.419355
| 110
| 0.69697
| 97
| 726
| 5.113402
| 0.690722
| 0.060484
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008865
| 0.223141
| 726
| 30
| 111
| 24.2
| 0.870567
| 0.663912
| 0
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033333
| 0
| 1
| 0.428571
| true
| 0.428571
| 0.142857
| 0
| 0.571429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
fa31e575b815d98ac0fe0a25c6150e0d0354c84e
| 212
|
py
|
Python
|
pyefun/wxefun/component/ColourPickerCtrl.py
|
nuo010/pyefun
|
c1c4dfcfd382a67df005a66958da95aa13c30686
|
[
"Apache-2.0"
] | 94
|
2021-05-19T04:09:29.000Z
|
2022-03-27T04:02:30.000Z
|
pyefun/wxefun/component/ColourPickerCtrl.py
|
1431241631/pyefun
|
ac2290d4bcc8de16c195d2782f3eacd26e5e6ed4
|
[
"Apache-2.0"
] | 11
|
2021-05-22T06:44:19.000Z
|
2021-12-27T11:16:06.000Z
|
pyefun/wxefun/component/ColourPickerCtrl.py
|
1431241631/pyefun
|
ac2290d4bcc8de16c195d2782f3eacd26e5e6ed4
|
[
"Apache-2.0"
] | 21
|
2021-05-22T21:08:09.000Z
|
2022-02-24T02:39:06.000Z
|
import wx
from .wxControl import *
class 颜色选择器(wx.ColourPickerCtrl, 公用方法):
    # "Colour picker" widget: wx.ColourPickerCtrl combined with the
    # project's shared helper mixin (公用方法 = "common methods").
    # NOTE(review): original indentation was lost in extraction; the two
    # methods below are assumed to belong to this class — confirm.
    pass

    def 取当前颜色(self):
        # "Get current colour": returns the currently selected colour
        # via the underlying wx control.
        return self.GetColour()

    @组件_异常检测
    def 置当前颜色(self, 颜色):
        # "Set current colour": forwards 颜色 ("colour") to wx; wrapped by
        # the project's 组件_异常检测 ("component exception check") decorator.
        return self.SetColour(颜色)
| 15.142857
| 39
| 0.641509
| 27
| 212
| 5
| 0.703704
| 0.148148
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.259434
| 212
| 13
| 40
| 16.307692
| 0.859873
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0.111111
| 0.222222
| 0.222222
| 0.777778
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 5
|
d705f932106417b717c794b42ce6822f81d4f1ee
| 359
|
py
|
Python
|
awx/main/utils/execution_environments.py
|
james-crowley/awx
|
5cd44cde991a9526810809544e7a8f12e6174711
|
[
"Apache-2.0"
] | 1
|
2021-12-27T14:33:10.000Z
|
2021-12-27T14:33:10.000Z
|
awx/main/utils/execution_environments.py
|
james-crowley/awx
|
5cd44cde991a9526810809544e7a8f12e6174711
|
[
"Apache-2.0"
] | 25
|
2021-04-01T05:53:59.000Z
|
2022-03-01T11:23:51.000Z
|
awx/main/utils/execution_environments.py
|
james-crowley/awx
|
5cd44cde991a9526810809544e7a8f12e6174711
|
[
"Apache-2.0"
] | null | null | null |
from django.conf import settings
from awx.main.models.execution_environments import ExecutionEnvironment
def get_execution_environment_default():
    """Return the default execution environment.

    Prefers the explicit ``DEFAULT_EXECUTION_ENVIRONMENT`` setting when it
    is set; otherwise falls back to the first ``ExecutionEnvironment`` that
    is managed and not tied to an organization (may be ``None``).
    """
    configured = settings.DEFAULT_EXECUTION_ENVIRONMENT
    if configured is not None:
        return configured
    fallback = ExecutionEnvironment.objects.filter(
        organization=None, managed_by_tower=True
    )
    return fallback.first()
| 35.9
| 96
| 0.835655
| 42
| 359
| 6.904762
| 0.666667
| 0.206897
| 0.165517
| 0.241379
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108635
| 359
| 9
| 97
| 39.888889
| 0.90625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| true
| 0
| 0.333333
| 0
| 0.833333
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
d73a6e790cf224b8372146b4e2fc36a2e33c043d
| 326
|
py
|
Python
|
Mapeo/conversion.py
|
alejoso76/Computaci-n-gr-fica
|
474a498a328b8951aa0bfa1db2d0d1f3d8cc914b
|
[
"MIT"
] | null | null | null |
Mapeo/conversion.py
|
alejoso76/Computaci-n-gr-fica
|
474a498a328b8951aa0bfa1db2d0d1f3d8cc914b
|
[
"MIT"
] | null | null | null |
Mapeo/conversion.py
|
alejoso76/Computaci-n-gr-fica
|
474a498a328b8951aa0bfa1db2d0d1f3d8cc914b
|
[
"MIT"
] | null | null | null |
# NOTE(review): Python 2 code (print statement; the ConfigParser module
# was renamed to configparser in Python 3).
import ConfigParser

# Create the INI-style parser that handles the library / .map file.
interprete = ConfigParser.ConfigParser()
print 'Funciona'
interprete.read('proto.map')

# get: read individual options from their sections.
print interprete.get('seccion','n')
print interprete.get('seccion','m')
print interprete.get('seccion2','n')

# sections: list all section names found in the file.
print interprete.sections()

# items: list every (option, value) pair in a section.
print interprete.items('seccion')
| 18.111111
| 40
| 0.766871
| 39
| 326
| 6.410256
| 0.461538
| 0.3
| 0.216
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003356
| 0.08589
| 326
| 17
| 41
| 19.176471
| 0.83557
| 0.110429
| 0
| 0
| 0
| 0
| 0.171329
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.111111
| null | null | 0.666667
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
ad113dbb8095bee3dcf37152d8e6763bb4899b86
| 43
|
py
|
Python
|
tda_mvc/utils/exception.py
|
jjjkkkjjj/cldvis-txtmodifier
|
647c57cdba325d54229b3300f7d0b18baa7b0b74
|
[
"MIT"
] | 3
|
2021-07-08T04:58:00.000Z
|
2022-03-06T11:19:08.000Z
|
tda_mvc/utils/exception.py
|
jjjkkkjjj/cldvis-txtmodifier
|
647c57cdba325d54229b3300f7d0b18baa7b0b74
|
[
"MIT"
] | 22
|
2020-11-09T15:36:23.000Z
|
2021-05-02T07:38:43.000Z
|
tda_mvc/utils/exception.py
|
jjjkkkjjj/cldvis-txtmodifier
|
647c57cdba325d54229b3300f7d0b18baa7b0b74
|
[
"MIT"
] | null | null | null |
class PredictionError(Exception):
    """Exception type used to signal prediction failures (see call sites)."""
| 14.333333
| 33
| 0.767442
| 4
| 43
| 8.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162791
| 43
| 3
| 34
| 14.333333
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
ad250ca8012a464fa4675d74981f99b48364cf53
| 404
|
py
|
Python
|
exercises/slide_104/static-police/staticpolice/errors.py
|
Ruide/angr-dev
|
964dc80c758e25c698c2cbcc454ef5954c5fa0a0
|
[
"BSD-2-Clause"
] | null | null | null |
exercises/slide_104/static-police/staticpolice/errors.py
|
Ruide/angr-dev
|
964dc80c758e25c698c2cbcc454ef5954c5fa0a0
|
[
"BSD-2-Clause"
] | null | null | null |
exercises/slide_104/static-police/staticpolice/errors.py
|
Ruide/angr-dev
|
964dc80c758e25c698c2cbcc454ef5954c5fa0a0
|
[
"BSD-2-Clause"
] | null | null | null |
class StaticPoliceBaseError(Exception):
    """Root of the staticpolice error hierarchy."""


class StaticPoliceBaseNotice(Exception):
    """Root of the staticpolice notice hierarchy (non-fatal signals)."""


#
# Generic errors
#

class FunctionNotFoundError(StaticPoliceBaseError):
    """A requested function could not be located."""


class StaticPoliceTypeError(StaticPoliceBaseError):
    """An object did not have the expected type."""


class StaticPoliceKeyNotFoundError(StaticPoliceBaseError):
    """A requested key was not found."""


#
# Policy notices
#

class PolicySkipFunctionNotice(StaticPoliceBaseNotice):
    """Notice signalling that a policy skipped a function."""
| 14.962963
| 58
| 0.792079
| 28
| 404
| 11.428571
| 0.464286
| 0.084375
| 0.1875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148515
| 404
| 26
| 59
| 15.538462
| 0.930233
| 0.071782
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
ad2a3ddeb62c7d232a56e5b798c8f92746b0dade
| 40
|
py
|
Python
|
tests/__init__.py
|
emlove/pykulersky
|
079e3d4642b869963eb3c27297f7f79320413958
|
[
"Apache-2.0"
] | null | null | null |
tests/__init__.py
|
emlove/pykulersky
|
079e3d4642b869963eb3c27297f7f79320413958
|
[
"Apache-2.0"
] | null | null | null |
tests/__init__.py
|
emlove/pykulersky
|
079e3d4642b869963eb3c27297f7f79320413958
|
[
"Apache-2.0"
] | null | null | null |
"""Unit test package for pykulersky."""
| 20
| 39
| 0.7
| 5
| 40
| 5.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 40
| 1
| 40
| 40
| 0.8
| 0.825
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ad5adfc4f6bd7a8e8cf071ed3859360acbd1624c
| 35
|
py
|
Python
|
config.py
|
takavarasha-desire/habittracker1_1
|
392034a0d67f2be0e8e34648614fc90c851d9f51
|
[
"MIT"
] | null | null | null |
config.py
|
takavarasha-desire/habittracker1_1
|
392034a0d67f2be0e8e34648614fc90c851d9f51
|
[
"MIT"
] | null | null | null |
config.py
|
takavarasha-desire/habittracker1_1
|
392034a0d67f2be0e8e34648614fc90c851d9f51
|
[
"MIT"
] | null | null | null |
# SQLAlchemy-style connection string: a SQLite database file named
# "habits.sqlite" relative to the current working directory.
DB_URI = 'sqlite:///habits.sqlite'
| 17.5
| 34
| 0.685714
| 5
| 35
| 4.6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085714
| 35
| 1
| 35
| 35
| 0.71875
| 0
| 0
| 0
| 0
| 0
| 0.657143
| 0.657143
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ad9710090688258ebe0dc9dd01ff927b232fee81
| 116
|
py
|
Python
|
old-stuff-for-reference/nightjar-base/nightjar-src/python-src/nightjar/backend/__init__.py
|
groboclown/nightjar-mesh
|
3655307b4a0ad00a0f18db835b3a0d04cb8e9615
|
[
"MIT"
] | 3
|
2019-12-23T23:46:02.000Z
|
2020-08-07T23:10:20.000Z
|
old-stuff-for-reference/nightjar-base/nightjar-src/python-src/nightjar/backend/__init__.py
|
groboclown/nightjar-mesh
|
3655307b4a0ad00a0f18db835b3a0d04cb8e9615
|
[
"MIT"
] | 2
|
2020-02-07T15:59:15.000Z
|
2020-08-05T21:55:27.000Z
|
old-stuff-for-reference/nightjar-base/nightjar-src/python-src/nightjar/backend/__init__.py
|
groboclown/nightjar-mesh
|
3655307b4a0ad00a0f18db835b3a0d04cb8e9615
|
[
"MIT"
] | 1
|
2020-05-28T00:46:05.000Z
|
2020-05-28T00:46:05.000Z
|
"""
Backend service definitions. This is the home for the nightjar extension points.
"""
from . import api, impl
| 16.571429
| 81
| 0.724138
| 16
| 116
| 5.25
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.189655
| 116
| 6
| 82
| 19.333333
| 0.893617
| 0.698276
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
ad993ab0891be90569b0dde08c13e3c7610c878c
| 75
|
py
|
Python
|
comments/etc.py
|
AmanoTeam/comments
|
a30a17fe34e488e708acbc341bf85c7ed39cb27c
|
[
"MIT"
] | 3
|
2019-06-06T00:02:50.000Z
|
2020-06-09T19:54:39.000Z
|
comments/etc.py
|
AmanoTeam/comments
|
a30a17fe34e488e708acbc341bf85c7ed39cb27c
|
[
"MIT"
] | 2
|
2020-09-24T11:15:02.000Z
|
2020-09-25T15:14:40.000Z
|
comments/etc.py
|
AmanoTeam/comments
|
a30a17fe34e488e708acbc341bf85c7ed39cb27c
|
[
"MIT"
] | null | null | null |
class CustomDict(dict):
    """A dict whose entries can also be read as attributes.

    Attribute access falls back to ``dict.get``, so a missing key yields
    ``None`` instead of raising ``AttributeError``.
    """

    def __getattr__(self, name):
        # Only invoked when normal attribute lookup fails, i.e. for names
        # that are not real dict attributes/methods.
        return self.get(name)
| 25
| 28
| 0.746667
| 11
| 75
| 4.727273
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12
| 75
| 3
| 29
| 25
| 0.787879
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
ad9c4edd6888fbc51518442c8e5b11b37fa4dda5
| 98
|
py
|
Python
|
src/facebook_utils/__init__.py
|
jvanasco/facebook_utils
|
38d76e696fe0c0cf5b6ced51a8d15afc9020c004
|
[
"BSD-3-Clause"
] | 5
|
2015-05-18T17:13:29.000Z
|
2019-12-12T22:21:47.000Z
|
src/facebook_utils/__init__.py
|
jvanasco/facebook_utils
|
38d76e696fe0c0cf5b6ced51a8d15afc9020c004
|
[
"BSD-3-Clause"
] | 4
|
2015-03-20T16:06:31.000Z
|
2019-05-03T09:14:05.000Z
|
src/facebook_utils/__init__.py
|
jvanasco/facebook_utils
|
38d76e696fe0c0cf5b6ced51a8d15afc9020c004
|
[
"BSD-3-Clause"
] | 3
|
2015-03-19T08:26:54.000Z
|
2017-10-09T03:55:09.000Z
|
__VERSION__ = "0.60.0dev0"
from .api_urls import *
from .core import *
from .exceptions import *
| 16.333333
| 26
| 0.72449
| 14
| 98
| 4.714286
| 0.714286
| 0.30303
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060976
| 0.163265
| 98
| 5
| 27
| 19.6
| 0.743902
| 0
| 0
| 0
| 0
| 0
| 0.102041
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
a8eaec4b1503533010ba1346c59812f9ea62432e
| 24
|
py
|
Python
|
config.py
|
lidia1805/atividade_005_custom_breakout
|
b9a72a0c236cf667d8c5a03214fbe8768bbc3373
|
[
"MIT"
] | null | null | null |
config.py
|
lidia1805/atividade_005_custom_breakout
|
b9a72a0c236cf667d8c5a03214fbe8768bbc3373
|
[
"MIT"
] | null | null | null |
config.py
|
lidia1805/atividade_005_custom_breakout
|
b9a72a0c236cf667d8c5a03214fbe8768bbc3373
|
[
"MIT"
] | null | null | null |
print('I am config.py')
| 12
| 23
| 0.666667
| 5
| 24
| 3.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 24
| 1
| 24
| 24
| 0.761905
| 0
| 0
| 0
| 0
| 0
| 0.583333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
d14160a204005aa47b3a384bd32fcf19298e9c42
| 4,801
|
py
|
Python
|
recolo/tests/test_grid_def_and_deflectometry.py
|
PolymerGuy/recon
|
14d66e3d5cb5fcc4868df326b045952daf139291
|
[
"MIT"
] | 4
|
2021-10-14T19:52:05.000Z
|
2022-03-10T11:41:34.000Z
|
recolo/tests/test_grid_def_and_deflectometry.py
|
PolymerGuy/recolo
|
05b14f0834fa675579eabdf43fac046259df19bb
|
[
"MIT"
] | null | null | null |
recolo/tests/test_grid_def_and_deflectometry.py
|
PolymerGuy/recolo
|
05b14f0834fa675579eabdf43fac046259df19bb
|
[
"MIT"
] | 1
|
2022-03-04T13:09:01.000Z
|
2022-03-04T13:09:01.000Z
|
from recolo.deflectomerty import disp_from_grids, angle_from_disp
from recolo.artificial_grid_deformation import deform_grid_from_deflection
from recolo.slope_integration import disp_from_slopes
from unittest import TestCase
import numpy as np
class Test_DeformGridNoise(TestCase):
    """Checks that synthetic grid images carry the requested noise level."""

    def test_noise_with_different_std(self):
        # Allowed relative error between requested and measured noise.
        rel_tol = 0.05
        pixel_size = 1
        mirror_grid_dist = 500.
        grid_pitch = 5
        upscale = 1
        oversampling = 5
        n_pts_x = 100
        n_pts_y = 100
        n_imgs = 50
        # NOTE(review): np.arange(0.001, 0.01, 0.1) yields only a single
        # value (0.001) because the step exceeds the range — confirm intent.
        noise_stds = np.arange(0.001,0.01,0.1)
        for noise_std in noise_stds:
            undeformed_field = np.zeros((n_pts_x,n_pts_y))
            # Generate n_imgs noisy grid images of a flat (zero-deflection) field.
            grids = [deform_grid_from_deflection(undeformed_field,pixel_size,mirror_grid_dist,grid_pitch,upscale,oversampling,img_noise_std=noise_std) for _ in range(n_imgs)]
            # Pixel-wise std over the image stack, averaged over the field.
            noise_meas_std = np.mean(np.std(grids,axis=0))
            # Normalise by the grid's dynamic range so the measurement is
            # comparable with the requested relative noise level.
            norm_noise_std = noise_meas_std/(np.max(grids)-np.min(grids))
            if np.abs(norm_noise_std-noise_std)/noise_std > rel_tol:
                self.fail("The noise level is not correct. Correct is %f, estimated is %f"%(noise_std,norm_noise_std))
class Test_DeformGridAndRunDeflectometry(TestCase):
    """Round-trip test: deform a grid by a known deflection field, run
    deflectometry and slope integration, and compare the reconstruction
    against the input field."""

    def test_half_sine_deflection_no_upscale(self):
        # Allowed relative peak error between input and reconstruction.
        rel_tol = 0.01
        pixel_size = 1
        mirror_grid_dist = 500.
        grid_pitch = 5
        upscale = 1
        oversampling = 5
        deflection_amp = 0.1
        n_pts_x = 200
        n_pts_y = 200
        # Half-sine deflection surface over the unit square.
        xs,ys = np.meshgrid(np.linspace(0,1,n_pts_x),np.linspace(0,1,n_pts_y))
        deflection_field = deflection_amp * np.sin(np.pi*xs)*np.sin(np.pi*ys)
        undeformed_field = np.zeros_like(deflection_field)
        # Synthesize the reference (flat) and deformed grid images.
        undeformed_grid = deform_grid_from_deflection(undeformed_field,pixel_size,mirror_grid_dist,grid_pitch,upscale,oversampling)
        deformed_grid = deform_grid_from_deflection(deflection_field,pixel_size,mirror_grid_dist,grid_pitch,upscale,oversampling)
        # Grid-method displacement fields, converted to slope angles.
        disp_x,disp_y = disp_from_grids(undeformed_grid,deformed_grid,grid_pitch,correct_phase=True)
        angle_x = angle_from_disp(disp_x,mirror_grid_dist)
        angle_y = angle_from_disp(disp_y,mirror_grid_dist)
        # Add axis (disp_from_slopes expects a time/frame axis first).
        angle_x = angle_x[np.newaxis,:,:]
        angle_y = angle_y[np.newaxis,:,:]
        reconstucted_defl = disp_from_slopes(angle_x,angle_y,pixel_size,zero_at="bottom corners")
        # As a reduced field of view causes a shift of the whole field, a
        # manual offset correction is performed before comparing.
        reconstucted_defl = reconstucted_defl + deflection_field[4*grid_pitch,4*grid_pitch]
        # Crop the input to the region deflectometry actually reconstructs.
        cropped_deflection = deflection_field[4*grid_pitch:-4*grid_pitch,4*grid_pitch:-4*grid_pitch]
        rel_peak_error = np.max(np.abs(reconstucted_defl[0,:,:]-cropped_deflection))/deflection_amp
        if rel_peak_error > rel_tol:
            self.fail("The peak error of %f is larger than the tolerance of %f"%(rel_peak_error,rel_tol))
class Test_DeformGridAndRunDeflectometry_Triangular(TestCase):
    """Same round-trip as Test_DeformGridAndRunDeflectometry but with the
    triangular window in the grid-method phase detection (note the smaller
    crop margin: grid_pitch-1 instead of 4*grid_pitch)."""

    def test_half_sine_deflection_no_upscale(self):
        # Allowed relative peak error between input and reconstruction.
        rel_tol = 0.01
        window="triangular"
        pixel_size = 1
        mirror_grid_dist = 500.
        grid_pitch = 5
        upscale = 1
        oversampling = 5
        deflection_amp = 0.1
        n_pts_x = 200
        n_pts_y = 200
        # Half-sine deflection surface over the unit square.
        xs,ys = np.meshgrid(np.linspace(0,1,n_pts_x),np.linspace(0,1,n_pts_y))
        deflection_field = deflection_amp * np.sin(np.pi*xs)*np.sin(np.pi*ys)
        undeformed_field = np.zeros_like(deflection_field)
        # Synthesize the reference (flat) and deformed grid images.
        undeformed_grid = deform_grid_from_deflection(undeformed_field,pixel_size,mirror_grid_dist,grid_pitch,upscale,oversampling)
        deformed_grid = deform_grid_from_deflection(deflection_field,pixel_size,mirror_grid_dist,grid_pitch,upscale,oversampling)
        # Grid-method displacement fields with the triangular window.
        disp_x,disp_y = disp_from_grids(undeformed_grid,deformed_grid,grid_pitch,correct_phase=True,window=window)
        angle_x = angle_from_disp(disp_x,mirror_grid_dist)
        angle_y = angle_from_disp(disp_y,mirror_grid_dist)
        # Add axis (disp_from_slopes expects a time/frame axis first).
        angle_x = angle_x[np.newaxis,:,:]
        angle_y = angle_y[np.newaxis,:,:]
        reconstucted_defl = disp_from_slopes(angle_x,angle_y,pixel_size,zero_at="bottom corners")
        # As a reduced field of view causes a shift of the whole field, a
        # manual offset correction is performed before comparing.
        reconstucted_defl = reconstucted_defl + deflection_field[grid_pitch-1,grid_pitch-1]
        # Crop the input to the region deflectometry actually reconstructs.
        cropped_deflection = deflection_field[grid_pitch-1:-grid_pitch+1,grid_pitch-1:-grid_pitch+1]
        rel_peak_error = np.max(np.abs(reconstucted_defl[0,:,:]-cropped_deflection))/deflection_amp
        if rel_peak_error > rel_tol:
            self.fail("The peak error of %f is larger than the tolerance of %f"%(rel_peak_error,rel_tol))
| 38.717742
| 174
| 0.707769
| 718
| 4,801
| 4.377437
| 0.167131
| 0.062997
| 0.053452
| 0.045816
| 0.762647
| 0.757238
| 0.757238
| 0.757238
| 0.734967
| 0.709513
| 0
| 0.022566
| 0.206207
| 4,801
| 123
| 175
| 39.03252
| 0.802152
| 0.044782
| 0
| 0.64557
| 0
| 0
| 0.045872
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037975
| false
| 0
| 0.063291
| 0
| 0.139241
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
66f2d7c87b2402248ccb211de6ef5cb9e4a33160
| 279
|
py
|
Python
|
mosdef_cassandra/examples/__init__.py
|
rsdefever/mosdef_cassandra
|
ceb8cbd3e402e2a1af9a6327c93c2f3097d6b108
|
[
"MIT"
] | null | null | null |
mosdef_cassandra/examples/__init__.py
|
rsdefever/mosdef_cassandra
|
ceb8cbd3e402e2a1af9a6327c93c2f3097d6b108
|
[
"MIT"
] | null | null | null |
mosdef_cassandra/examples/__init__.py
|
rsdefever/mosdef_cassandra
|
ceb8cbd3e402e2a1af9a6327c93c2f3097d6b108
|
[
"MIT"
] | 1
|
2020-03-29T21:07:44.000Z
|
2020-03-29T21:07:44.000Z
|
from .nvt import run_nvt
from .npt import run_npt
from .gcmc import run_gcmc
from .gemc import run_gemc
from .nvt_spce import run_nvt_spce
from .nvt_mixture import run_nvt_mixture
from .gcmc_adsorption import run_gcmc_adsorption
from .gcmc_restricted import run_gcmc_restricted
| 27.9
| 48
| 0.853047
| 48
| 279
| 4.625
| 0.229167
| 0.324324
| 0.162162
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11828
| 279
| 9
| 49
| 31
| 0.902439
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
0f0906ee90b3a4076944d7dddb126cda09b76b6e
| 48
|
py
|
Python
|
examples/01_using_from_python_without_class_reference.py
|
soft-matter/pimsviewer
|
9263ece121a58a0504c6e4d319ec6e18d1bb460a
|
[
"BSD-3-Clause"
] | 9
|
2018-06-26T06:49:34.000Z
|
2022-03-01T19:54:56.000Z
|
examples/01_using_from_python_without_class_reference.py
|
soft-matter/pimsviewer
|
9263ece121a58a0504c6e4d319ec6e18d1bb460a
|
[
"BSD-3-Clause"
] | 14
|
2017-03-02T17:34:08.000Z
|
2020-06-23T15:09:23.000Z
|
examples/01_using_from_python_without_class_reference.py
|
soft-matter/pimsviewer
|
9263ece121a58a0504c6e4d319ec6e18d1bb460a
|
[
"BSD-3-Clause"
] | 6
|
2017-03-02T18:36:20.000Z
|
2020-11-22T23:27:14.000Z
|
from pimsviewer import run

# Launch the viewer directly on a file path, without constructing a
# Viewer instance or referencing any reader class explicitly.
run('path/to/file')
| 12
| 26
| 0.75
| 8
| 48
| 4.5
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 48
| 3
| 27
| 16
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
0f50b51d9d72f98cc243e7e6e749ee36243e8bb7
| 87
|
py
|
Python
|
benchctl/repl/magics.py
|
openlabequipment/benchd
|
81a21ad4f7f43b5035c7fe569090a7e310c4621b
|
[
"BSD-3-Clause"
] | null | null | null |
benchctl/repl/magics.py
|
openlabequipment/benchd
|
81a21ad4f7f43b5035c7fe569090a7e310c4621b
|
[
"BSD-3-Clause"
] | null | null | null |
benchctl/repl/magics.py
|
openlabequipment/benchd
|
81a21ad4f7f43b5035c7fe569090a7e310c4621b
|
[
"BSD-3-Clause"
] | null | null | null |
from IPython.core import magic
@magic.magics_class
class Magics(magic.Magics):
    # Placeholder IPython magics container: registered via @magics_class
    # but defines no custom magic commands yet.
    pass
| 12.428571
| 30
| 0.793103
| 13
| 87
| 5.230769
| 0.615385
| 0.323529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126437
| 87
| 6
| 31
| 14.5
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
0f89b1f98493bd62bb187af3da4a20ddb8980544
| 52
|
py
|
Python
|
pdfdataextractor/__init__.py
|
cat-lemonade/PDFDataExtractor
|
ff6bef8d5a41c9f92c4601981ae1d5eb078f4c53
|
[
"MIT"
] | 4
|
2021-06-30T11:38:25.000Z
|
2021-06-30T12:29:24.000Z
|
pdfdataextractor/__init__.py
|
cat-lemonade/PDFDataExtractor
|
ff6bef8d5a41c9f92c4601981ae1d5eb078f4c53
|
[
"MIT"
] | null | null | null |
pdfdataextractor/__init__.py
|
cat-lemonade/PDFDataExtractor
|
ff6bef8d5a41c9f92c4601981ae1d5eb078f4c53
|
[
"MIT"
] | null | null | null |
# # -*- coding: utf-8 -*-
from .extraction import *
| 26
| 26
| 0.576923
| 6
| 52
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02381
| 0.192308
| 52
| 2
| 27
| 26
| 0.690476
| 0.403846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7e149821d21338cd912e51b8f7fdb804f5ae7b58
| 7,356
|
py
|
Python
|
tests/test_entities.py
|
Orange-OpenSource/python-ngsild-client
|
23ff31506aabd23c75befece1fb3d4536903cb2a
|
[
"Apache-2.0"
] | 7
|
2022-02-25T09:55:28.000Z
|
2022-03-25T20:48:01.000Z
|
tests/test_entities.py
|
Orange-OpenSource/python-ngsild-client
|
23ff31506aabd23c75befece1fb3d4536903cb2a
|
[
"Apache-2.0"
] | null | null | null |
tests/test_entities.py
|
Orange-OpenSource/python-ngsild-client
|
23ff31506aabd23c75befece1fb3d4536903cb2a
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# Software Name: ngsildclient
# SPDX-FileCopyrightText: Copyright (c) 2021 Orange
# SPDX-License-Identifier: Apache 2.0
#
# This software is distributed under the Apache 2.0;
# see the NOTICE file for more details.
#
# Author: Fabien BATTELLO <fabien.battello@orange.com> et al.
import logging
import pytest
from pytest_mock.plugin import MockerFixture
from ngsildclient.api.client import Client
from ngsildclient.api.exceptions import (
NgsiAlreadyExistsError,
NgsiResourceNotFoundError,
ProblemDetails,
)
# from .common import sample_entity, mocked_connected
from .common import sample_entity
logger = logging.getLogger(__name__)
def test_api_create(mocked_connected, requests_mock):
requests_mock.post(
"http://localhost:1026/ngsi-ld/v1/entities/",
request_headers={"Content-Type": "application/ld+json"},
headers={"Location": "urn:ngsi-ld:AirQualityObserved:RZ:Obsv4567"},
status_code=201,
)
client = Client()
res = client.entities.create(sample_entity)
assert res == sample_entity
def test_api_create_error_already_exists(mocked_connected, requests_mock):
requests_mock.post(
"http://localhost:1026/ngsi-ld/v1/entities/",
request_headers={"Content-Type": "application/ld+json"},
status_code=409,
json={
"type": "https://uri.etsi.org/ngsi-ld/errors/AlreadyExists",
"title": "Entity already exists",
"detail": "urn:ngsi-ld:AirQualityObserved:RZ:Obsv4567",
},
)
client = Client()
with pytest.raises(NgsiAlreadyExistsError) as excinfo:
client.entities.create(sample_entity)
logger.info(f"{type(excinfo.value)=}")
assert (
excinfo.value.problemdetails.type
== "https://uri.etsi.org/ngsi-ld/errors/AlreadyExists"
)
assert excinfo.value.problemdetails.title == "Entity already exists"
assert excinfo.value.problemdetails.status == 409
assert (
excinfo.value.problemdetails.detail
== "urn:ngsi-ld:AirQualityObserved:RZ:Obsv4567"
)
assert excinfo.value.problemdetails.instance is None
assert excinfo.value.problemdetails.extension == {}
def test_api_retrieve(mocked_connected, requests_mock):
requests_mock.get(
"http://localhost:1026/ngsi-ld/v1/entities/urn:ngsi-ld:AirQualityObserved:RZ:Obsv4567",
request_headers={"Accept": "application/ld+json"},
status_code=200,
json=sample_entity.to_dict(),
)
client = Client()
res = client.entities.get("urn:ngsi-ld:AirQualityObserved:RZ:Obsv4567")
assert res == sample_entity
def test_api_retrieve_error_not_found(mocked_connected, requests_mock):
requests_mock.get(
"http://localhost:1026/ngsi-ld/v1/entities/urn:ngsi-ld:AirQualityObserved:RZ:Obsv4568",
request_headers={"Accept": "application/ld+json"},
status_code=404,
json={
"type": "https://uri.etsi.org/ngsi-ld/errors/ResourceNotFound",
"title": "Entity Not Found",
"detail": "urn:ngsi-ld:AirQualityObserved:RZ:Obsv4568",
},
)
client = Client()
with pytest.raises(NgsiResourceNotFoundError) as excinfo:
client.entities.get("urn:ngsi-ld:AirQualityObserved:RZ:Obsv4568")
assert (
excinfo.value.problemdetails.type == "https://uri.etsi.org/ngsi-ld/errors/ResourceNotFound"
)
assert excinfo.value.problemdetails.title == "Entity Not Found"
assert excinfo.value.problemdetails.status == 404
assert (
excinfo.value.problemdetails.detail == "urn:ngsi-ld:AirQualityObserved:RZ:Obsv4568"
)
assert excinfo.value.problemdetails.instance is None
assert excinfo.value.problemdetails.extension == {}
def test_api_exists(mocked_connected, requests_mock):
requests_mock.get(
"http://localhost:1026/ngsi-ld/v1/entities/urn:ngsi-ld:AirQualityObserved:RZ:Obsv4567",
request_headers={"Accept": "application/ld+json"},
status_code=200,
json=sample_entity.to_dict(),
)
client = Client()
res = client._entities.exists("urn:ngsi-ld:AirQualityObserved:RZ:Obsv4567")
assert res
def test_api_delete(mocked_connected, requests_mock):
requests_mock.delete(
"http://localhost:1026/ngsi-ld/v1/entities/urn:ngsi-ld:AirQualityObserved:RZ:Obsv4567",
status_code=200,
)
client = Client()
res = client._entities.delete("urn:ngsi-ld:AirQualityObserved:RZ:Obsv4567")
assert res
def test_api_delete_error_not_found(mocked_connected, requests_mock):
requests_mock.delete(
"http://localhost:1026/ngsi-ld/v1/entities/urn:ngsi-ld:AirQualityObserved:RZ:Obsv4568",
status_code=404,
json={
"type": "https://uri.etsi.org/ngsi-ld/errors/ResourceNotFound",
"title": "Entity Not Found",
"detail": "urn:ngsi-ld:AirQualityObserved:RZ:Obsv4568",
},
)
client = Client()
with pytest.raises(NgsiResourceNotFoundError) as excinfo:
client._entities.delete("urn:ngsi-ld:AirQualityObserved:RZ:Obsv4568")
assert (
excinfo.value.problemdetails.type
== "https://uri.etsi.org/ngsi-ld/errors/ResourceNotFound"
)
assert excinfo.value.problemdetails.title == "Entity Not Found"
assert excinfo.value.problemdetails.status == 404
assert (
excinfo.value.problemdetails.detail
== "urn:ngsi-ld:AirQualityObserved:RZ:Obsv4568"
)
assert excinfo.value.problemdetails.instance is None
assert excinfo.value.problemdetails.extension == {}
def test_api_upsert_existent_entity(mocked_connected, mocker: MockerFixture):
client = Client()
pd = ProblemDetails(
"AlreadyExists",
"Entity already exists",
409,
"urn:ngsi-ld:AirQualityObserved:RZ:Obsv4567",
)
mocked_create = mocker.patch.object(
client._entities,
"create",
side_effect=[NgsiAlreadyExistsError(pd), sample_entity],
)
mocked_delete = mocker.patch.object(client._entities, "delete", return_value=True)
res = client._entities.upsert(sample_entity)
assert mocked_create.call_count == 2
assert mocked_delete.call_count == 1
assert res == sample_entity
def test_api_upsert_nonexistent_entity(mocked_connected, mocker: MockerFixture):
client = Client()
mocked_create = mocker.patch.object(
client._entities, "create", return_value=sample_entity
)
mocked_delete = mocker.patch.object(client._entities, "delete", return_value=True)
res = client._entities.upsert(sample_entity)
assert mocked_create.call_count == 1
assert mocked_delete.call_count == 0
assert res == sample_entity
def test_api_update_existent_entity(mocked_connected, mocker: MockerFixture):
client = Client()
mocker.patch.object(client._entities, "exists", return_value=True)
mocker.patch.object(client._entities, "delete", return_value=True)
mocker.patch.object(client._entities, "create", return_value=sample_entity)
res = client._entities.update(sample_entity)
assert res == sample_entity
def test_api_update_nonexistent_entity(mocked_connected, mocker: MockerFixture):
client = Client()
mocker.patch.object(client._entities, "exists", return_value=False)
res = client._entities.update(sample_entity)
assert res is None
| 36.415842
| 99
| 0.703371
| 852
| 7,356
| 5.911972
| 0.156103
| 0.036927
| 0.032162
| 0.096486
| 0.849911
| 0.796704
| 0.780822
| 0.757792
| 0.660711
| 0.616041
| 0
| 0.025331
| 0.178902
| 7,356
| 201
| 100
| 36.597015
| 0.808609
| 0.045677
| 0
| 0.50303
| 0
| 0.030303
| 0.250499
| 0.081027
| 0
| 0
| 0
| 0
| 0.181818
| 1
| 0.066667
| false
| 0
| 0.036364
| 0
| 0.10303
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
7e31508f47fba6ec7681cf8a090e01921e85724b
| 219
|
py
|
Python
|
firstApp/views.py
|
coreymyster/django-hello
|
fbbcbf0f06ae1792e0913c36dbe7a0b211ba68d5
|
[
"MIT"
] | null | null | null |
firstApp/views.py
|
coreymyster/django-hello
|
fbbcbf0f06ae1792e0913c36dbe7a0b211ba68d5
|
[
"MIT"
] | null | null | null |
firstApp/views.py
|
coreymyster/django-hello
|
fbbcbf0f06ae1792e0913c36dbe7a0b211ba68d5
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from django.http import HttpResponse
# If there is an http response then "Hello World!" will
# be diaplyed on the screen
def myView(request):
return HttpResponse('Hello World!')
| 27.375
| 55
| 0.771689
| 32
| 219
| 5.28125
| 0.78125
| 0.118343
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.159817
| 219
| 7
| 56
| 31.285714
| 0.918478
| 0.360731
| 0
| 0
| 0
| 0
| 0.087591
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
7e533dc247c57c6bfbd6c6977f8fed10567b5712
| 120
|
py
|
Python
|
gdesk/panels/scriptwiz/__init__.py
|
thocoo/gamma-desk
|
9cb63a65fe23e30e155b3beca862f369b7fa1b7e
|
[
"Apache-2.0"
] | null | null | null |
gdesk/panels/scriptwiz/__init__.py
|
thocoo/gamma-desk
|
9cb63a65fe23e30e155b3beca862f369b7fa1b7e
|
[
"Apache-2.0"
] | 8
|
2021-04-09T11:31:43.000Z
|
2021-06-09T09:07:18.000Z
|
gdesk/panels/scriptwiz/__init__.py
|
thocoo/gamma-desk
|
9cb63a65fe23e30e155b3beca862f369b7fa1b7e
|
[
"Apache-2.0"
] | null | null | null |
from ... import config
from .proxy import ScriptWizardProxy
if config['qapp']:
from .panel import ScriptWizardPanel
| 24
| 40
| 0.766667
| 14
| 120
| 6.571429
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 120
| 5
| 40
| 24
| 0.901961
| 0
| 0
| 0
| 0
| 0
| 0.033058
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7e5cc17a156e042682d2850c0d8683289751e982
| 116
|
py
|
Python
|
econordeste/video/admin.py
|
klebercode/econordeste
|
a9370c3d823ef330f7fac761f162cc928549957c
|
[
"MIT"
] | null | null | null |
econordeste/video/admin.py
|
klebercode/econordeste
|
a9370c3d823ef330f7fac761f162cc928549957c
|
[
"MIT"
] | null | null | null |
econordeste/video/admin.py
|
klebercode/econordeste
|
a9370c3d823ef330f7fac761f162cc928549957c
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from econordeste.video.models import VideoEntry
admin.site.register(VideoEntry)
| 16.571429
| 47
| 0.836207
| 15
| 116
| 6.466667
| 0.733333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 116
| 6
| 48
| 19.333333
| 0.932692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.